body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
3369e65dc1ba9d20650ad53bf2f2f4b9be3eeae0546d77f685f33c30b63cdd4c
@pytest.fixture def xonsh_execer(monkeypatch): 'Initiate the Execer with a mocked nop `load_builtins`' monkeypatch.setattr('xonsh.built_ins.load_builtins.__code__', (lambda *args, **kwargs: None).__code__) added_session = False if (not hasattr(builtins, '__xonsh__')): added_session = True ensure_attached_session(monkeypatch, XonshSession()) execer = Execer(unload=False) builtins.__xonsh__.execer = execer (yield execer) if added_session: monkeypatch.delattr(builtins, '__xonsh__', raising=False)
Initiate the Execer with a mocked nop `load_builtins`
tests/conftest.py
xonsh_execer
jmoranos/xonsh
3
python
@pytest.fixture def xonsh_execer(monkeypatch): monkeypatch.setattr('xonsh.built_ins.load_builtins.__code__', (lambda *args, **kwargs: None).__code__) added_session = False if (not hasattr(builtins, '__xonsh__')): added_session = True ensure_attached_session(monkeypatch, XonshSession()) execer = Execer(unload=False) builtins.__xonsh__.execer = execer (yield execer) if added_session: monkeypatch.delattr(builtins, '__xonsh__', raising=False)
@pytest.fixture def xonsh_execer(monkeypatch): monkeypatch.setattr('xonsh.built_ins.load_builtins.__code__', (lambda *args, **kwargs: None).__code__) added_session = False if (not hasattr(builtins, '__xonsh__')): added_session = True ensure_attached_session(monkeypatch, XonshSession()) execer = Execer(unload=False) builtins.__xonsh__.execer = execer (yield execer) if added_session: monkeypatch.delattr(builtins, '__xonsh__', raising=False)<|docstring|>Initiate the Execer with a mocked nop `load_builtins`<|endoftext|>
c0b14b503c569f7bdae86c0ab7ea7cbd8aa6540dcf30f482a7a5879e2277da62
@pytest.fixture def monkeypatch_stderr(monkeypatch): 'Monkeypath sys.stderr with no ResourceWarning.' with open(os.devnull, 'w') as fd: monkeypatch.setattr(sys, 'stderr', fd) (yield)
Monkeypath sys.stderr with no ResourceWarning.
tests/conftest.py
monkeypatch_stderr
jmoranos/xonsh
3
python
@pytest.fixture def monkeypatch_stderr(monkeypatch): with open(os.devnull, 'w') as fd: monkeypatch.setattr(sys, 'stderr', fd) (yield)
@pytest.fixture def monkeypatch_stderr(monkeypatch): with open(os.devnull, 'w') as fd: monkeypatch.setattr(sys, 'stderr', fd) (yield)<|docstring|>Monkeypath sys.stderr with no ResourceWarning.<|endoftext|>
bf5a693c051cd9a950059804793db888d60e660d7ede1b69837187556b120d9a
@pytest.fixture def xonsh_builtins(monkeypatch, xonsh_events): 'Mock out most of the builtins xonsh attributes.' old_builtins = set(dir(builtins)) execer = getattr(getattr(builtins, '__xonsh__', None), 'execer', None) session = XonshSession(execer=execer, ctx={}) ensure_attached_session(monkeypatch, session) builtins.__xonsh__.env = DummyEnv() if ON_WINDOWS: builtins.__xonsh__.env['PATHEXT'] = ['.EXE', '.BAT', '.CMD'] builtins.__xonsh__.shell = DummyShell() builtins.__xonsh__.help = (lambda x: x) builtins.__xonsh__.glob = glob.glob builtins.__xonsh__.exit = False builtins.__xonsh__.superhelp = (lambda x: x) builtins.__xonsh__.pathsearch = pathsearch builtins.__xonsh__.globsearch = globsearch builtins.__xonsh__.regexsearch = regexsearch builtins.__xonsh__.regexpath = (lambda x: []) builtins.__xonsh__.expand_path = (lambda x: x) builtins.__xonsh__.subproc_captured = sp builtins.__xonsh__.subproc_uncaptured = sp builtins.__xonsh__.stdout_uncaptured = None builtins.__xonsh__.stderr_uncaptured = None builtins.__xonsh__.ensure_list_of_strs = ensure_list_of_strs builtins.__xonsh__.commands_cache = DummyCommandsCache() builtins.__xonsh__.all_jobs = {} builtins.__xonsh__.list_of_strs_or_callables = list_of_strs_or_callables builtins.__xonsh__.list_of_list_of_strs_outer_product = list_of_list_of_strs_outer_product builtins.__xonsh__.eval_fstring_field = eval_fstring_field builtins.__xonsh__.history = DummyHistory() builtins.__xonsh__.subproc_captured_stdout = sp builtins.__xonsh__.subproc_captured_inject = sp builtins.__xonsh__.subproc_captured_object = sp builtins.__xonsh__.subproc_captured_hiddenobject = sp builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.completers = None builtins.__xonsh__.call_macro = call_macro builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.path_literal = path_literal builtins.__xonsh__.builtins = _BuiltIns(execer=execer) builtins.evalx = eval builtins.execx = None builtins.compilex = None builtins.aliases = {} 
builtins.events = xonsh_events (yield builtins) monkeypatch.delattr(builtins, '__xonsh__', raising=False) for attr in (set(dir(builtins)) - old_builtins): if hasattr(builtins, attr): delattr(builtins, attr) tasks.clear()
Mock out most of the builtins xonsh attributes.
tests/conftest.py
xonsh_builtins
jmoranos/xonsh
3
python
@pytest.fixture def xonsh_builtins(monkeypatch, xonsh_events): old_builtins = set(dir(builtins)) execer = getattr(getattr(builtins, '__xonsh__', None), 'execer', None) session = XonshSession(execer=execer, ctx={}) ensure_attached_session(monkeypatch, session) builtins.__xonsh__.env = DummyEnv() if ON_WINDOWS: builtins.__xonsh__.env['PATHEXT'] = ['.EXE', '.BAT', '.CMD'] builtins.__xonsh__.shell = DummyShell() builtins.__xonsh__.help = (lambda x: x) builtins.__xonsh__.glob = glob.glob builtins.__xonsh__.exit = False builtins.__xonsh__.superhelp = (lambda x: x) builtins.__xonsh__.pathsearch = pathsearch builtins.__xonsh__.globsearch = globsearch builtins.__xonsh__.regexsearch = regexsearch builtins.__xonsh__.regexpath = (lambda x: []) builtins.__xonsh__.expand_path = (lambda x: x) builtins.__xonsh__.subproc_captured = sp builtins.__xonsh__.subproc_uncaptured = sp builtins.__xonsh__.stdout_uncaptured = None builtins.__xonsh__.stderr_uncaptured = None builtins.__xonsh__.ensure_list_of_strs = ensure_list_of_strs builtins.__xonsh__.commands_cache = DummyCommandsCache() builtins.__xonsh__.all_jobs = {} builtins.__xonsh__.list_of_strs_or_callables = list_of_strs_or_callables builtins.__xonsh__.list_of_list_of_strs_outer_product = list_of_list_of_strs_outer_product builtins.__xonsh__.eval_fstring_field = eval_fstring_field builtins.__xonsh__.history = DummyHistory() builtins.__xonsh__.subproc_captured_stdout = sp builtins.__xonsh__.subproc_captured_inject = sp builtins.__xonsh__.subproc_captured_object = sp builtins.__xonsh__.subproc_captured_hiddenobject = sp builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.completers = None builtins.__xonsh__.call_macro = call_macro builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.path_literal = path_literal builtins.__xonsh__.builtins = _BuiltIns(execer=execer) builtins.evalx = eval builtins.execx = None builtins.compilex = None builtins.aliases = {} builtins.events = xonsh_events (yield builtins) 
monkeypatch.delattr(builtins, '__xonsh__', raising=False) for attr in (set(dir(builtins)) - old_builtins): if hasattr(builtins, attr): delattr(builtins, attr) tasks.clear()
@pytest.fixture def xonsh_builtins(monkeypatch, xonsh_events): old_builtins = set(dir(builtins)) execer = getattr(getattr(builtins, '__xonsh__', None), 'execer', None) session = XonshSession(execer=execer, ctx={}) ensure_attached_session(monkeypatch, session) builtins.__xonsh__.env = DummyEnv() if ON_WINDOWS: builtins.__xonsh__.env['PATHEXT'] = ['.EXE', '.BAT', '.CMD'] builtins.__xonsh__.shell = DummyShell() builtins.__xonsh__.help = (lambda x: x) builtins.__xonsh__.glob = glob.glob builtins.__xonsh__.exit = False builtins.__xonsh__.superhelp = (lambda x: x) builtins.__xonsh__.pathsearch = pathsearch builtins.__xonsh__.globsearch = globsearch builtins.__xonsh__.regexsearch = regexsearch builtins.__xonsh__.regexpath = (lambda x: []) builtins.__xonsh__.expand_path = (lambda x: x) builtins.__xonsh__.subproc_captured = sp builtins.__xonsh__.subproc_uncaptured = sp builtins.__xonsh__.stdout_uncaptured = None builtins.__xonsh__.stderr_uncaptured = None builtins.__xonsh__.ensure_list_of_strs = ensure_list_of_strs builtins.__xonsh__.commands_cache = DummyCommandsCache() builtins.__xonsh__.all_jobs = {} builtins.__xonsh__.list_of_strs_or_callables = list_of_strs_or_callables builtins.__xonsh__.list_of_list_of_strs_outer_product = list_of_list_of_strs_outer_product builtins.__xonsh__.eval_fstring_field = eval_fstring_field builtins.__xonsh__.history = DummyHistory() builtins.__xonsh__.subproc_captured_stdout = sp builtins.__xonsh__.subproc_captured_inject = sp builtins.__xonsh__.subproc_captured_object = sp builtins.__xonsh__.subproc_captured_hiddenobject = sp builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.completers = None builtins.__xonsh__.call_macro = call_macro builtins.__xonsh__.enter_macro = enter_macro builtins.__xonsh__.path_literal = path_literal builtins.__xonsh__.builtins = _BuiltIns(execer=execer) builtins.evalx = eval builtins.execx = None builtins.compilex = None builtins.aliases = {} builtins.events = xonsh_events (yield builtins) 
monkeypatch.delattr(builtins, '__xonsh__', raising=False) for attr in (set(dir(builtins)) - old_builtins): if hasattr(builtins, attr): delattr(builtins, attr) tasks.clear()<|docstring|>Mock out most of the builtins xonsh attributes.<|endoftext|>
f02c3a90926c98a4df4494f22480ac60a221268c00d2d179f440dcf5b261cd45
def pytest_configure(config): 'Abort test run if --flake8 requested, since it would hang on parser_test.py' if config.getoption('--flake8', ''): pytest.exit('pytest-flake8 no longer supported, use flake8 instead.')
Abort test run if --flake8 requested, since it would hang on parser_test.py
tests/conftest.py
pytest_configure
jmoranos/xonsh
3
python
def pytest_configure(config): if config.getoption('--flake8', ): pytest.exit('pytest-flake8 no longer supported, use flake8 instead.')
def pytest_configure(config): if config.getoption('--flake8', ): pytest.exit('pytest-flake8 no longer supported, use flake8 instead.')<|docstring|>Abort test run if --flake8 requested, since it would hang on parser_test.py<|endoftext|>
d131fa5c2761e6bd0c1d2e0c525784b15e392e6add161ef9b10c0d188027ad7c
def __init__(self, size: int) -> None: '\n initializes the structure with the\n specified size. Initially, all nodes\n are rooted at themselves (u[i] = i)\n\n Args:\n size (int): number of nodes\n ' self.root = [idx for idx in range(size)]
initializes the structure with the specified size. Initially, all nodes are rooted at themselves (u[i] = i) Args: size (int): number of nodes
py/dcp/leetcode/disjoint_set/quick_union.py
__init__
bmoretz/Daily-Coding-Problem
1
python
def __init__(self, size: int) -> None: '\n initializes the structure with the\n specified size. Initially, all nodes\n are rooted at themselves (u[i] = i)\n\n Args:\n size (int): number of nodes\n ' self.root = [idx for idx in range(size)]
def __init__(self, size: int) -> None: '\n initializes the structure with the\n specified size. Initially, all nodes\n are rooted at themselves (u[i] = i)\n\n Args:\n size (int): number of nodes\n ' self.root = [idx for idx in range(size)]<|docstring|>initializes the structure with the specified size. Initially, all nodes are rooted at themselves (u[i] = i) Args: size (int): number of nodes<|endoftext|>
90017bb76d812e5e0529915c5d81321ed6597145bb1494bd8950e541a3a85953
def find(self, x: int) -> int: '\n gets the parent of a node\n\n Args:\n x (int): node\n\n Returns:\n int: parent\n ' while (x != self.root[x]): x = self.root[x] return x
gets the parent of a node Args: x (int): node Returns: int: parent
py/dcp/leetcode/disjoint_set/quick_union.py
find
bmoretz/Daily-Coding-Problem
1
python
def find(self, x: int) -> int: '\n gets the parent of a node\n\n Args:\n x (int): node\n\n Returns:\n int: parent\n ' while (x != self.root[x]): x = self.root[x] return x
def find(self, x: int) -> int: '\n gets the parent of a node\n\n Args:\n x (int): node\n\n Returns:\n int: parent\n ' while (x != self.root[x]): x = self.root[x] return x<|docstring|>gets the parent of a node Args: x (int): node Returns: int: parent<|endoftext|>
91f5409bfd27a892a673efdb3ddeca5ef1ef253d98ddc66ddbab5d7e9a7206ce
def union(self, x: int, y: int) -> None: '\n quick union:\n\n set the parent of x to the parent of y\n (if not already connected)\n\n Args:\n x (int): x\n y (int): y\n ' (root_x, root_y) = (self.find(x), self.find(y)) if (root_x != root_y): self.root[y] = self.root[root_x]
quick union: set the parent of x to the parent of y (if not already connected) Args: x (int): x y (int): y
py/dcp/leetcode/disjoint_set/quick_union.py
union
bmoretz/Daily-Coding-Problem
1
python
def union(self, x: int, y: int) -> None: '\n quick union:\n\n set the parent of x to the parent of y\n (if not already connected)\n\n Args:\n x (int): x\n y (int): y\n ' (root_x, root_y) = (self.find(x), self.find(y)) if (root_x != root_y): self.root[y] = self.root[root_x]
def union(self, x: int, y: int) -> None: '\n quick union:\n\n set the parent of x to the parent of y\n (if not already connected)\n\n Args:\n x (int): x\n y (int): y\n ' (root_x, root_y) = (self.find(x), self.find(y)) if (root_x != root_y): self.root[y] = self.root[root_x]<|docstring|>quick union: set the parent of x to the parent of y (if not already connected) Args: x (int): x y (int): y<|endoftext|>
820a59fdeb303631632a1fb62248d07dcab1acc7a7af382ab4f832f053bed808
def parse_uri(self, text: Union[(str, URI)]) -> URIRef: '\n Parse input text into URI\n\n :param text: can be one of\n 1. URI, directly return\n 2. prefix:name, query namespace for prefix, return expanded URI\n 3. name, use default namespace to expand it and return it\n :return: URIRef\n ' if self.check_uriref(text): return self.check_uriref(text) elif isinstance(text, str): text = text.strip() m = URI_ABBR_PATTERN.match(text) if m: (prefix, name) = m.groups() base = self.store.namespace((prefix if prefix else '')) if (not base): raise PrefixNotFoundException('Prefix: %s', prefix) return URIRef((base + name)) elif isinstance(text, URI): return self.parse_uri(text.value) raise WrongFormatURIException(text)
Parse input text into URI :param text: can be one of 1. URI, directly return 2. prefix:name, query namespace for prefix, return expanded URI 3. name, use default namespace to expand it and return it :return: URIRef
etk/knowledge_graph/namespacemanager.py
parse_uri
donaq/etk
77
python
def parse_uri(self, text: Union[(str, URI)]) -> URIRef: '\n Parse input text into URI\n\n :param text: can be one of\n 1. URI, directly return\n 2. prefix:name, query namespace for prefix, return expanded URI\n 3. name, use default namespace to expand it and return it\n :return: URIRef\n ' if self.check_uriref(text): return self.check_uriref(text) elif isinstance(text, str): text = text.strip() m = URI_ABBR_PATTERN.match(text) if m: (prefix, name) = m.groups() base = self.store.namespace((prefix if prefix else )) if (not base): raise PrefixNotFoundException('Prefix: %s', prefix) return URIRef((base + name)) elif isinstance(text, URI): return self.parse_uri(text.value) raise WrongFormatURIException(text)
def parse_uri(self, text: Union[(str, URI)]) -> URIRef: '\n Parse input text into URI\n\n :param text: can be one of\n 1. URI, directly return\n 2. prefix:name, query namespace for prefix, return expanded URI\n 3. name, use default namespace to expand it and return it\n :return: URIRef\n ' if self.check_uriref(text): return self.check_uriref(text) elif isinstance(text, str): text = text.strip() m = URI_ABBR_PATTERN.match(text) if m: (prefix, name) = m.groups() base = self.store.namespace((prefix if prefix else )) if (not base): raise PrefixNotFoundException('Prefix: %s', prefix) return URIRef((base + name)) elif isinstance(text, URI): return self.parse_uri(text.value) raise WrongFormatURIException(text)<|docstring|>Parse input text into URI :param text: can be one of 1. URI, directly return 2. prefix:name, query namespace for prefix, return expanded URI 3. name, use default namespace to expand it and return it :return: URIRef<|endoftext|>
7cc4e2cd346859213937983157ed75185422d6b33f9bb227998bdc29aba2e6e0
def bind(self, prefix: str, namespace: str, override=True, replace=True): '\n bind a given namespace to the prefix, forbids same prefix with different namespace\n\n :param prefix:\n :param namespace:\n :param override: if override, rebind, even if the given namespace is already bound to another prefix.\n :param replace: if replace, replace any existing prefix with the new namespace\n ' namespace = URIRef(str(namespace)) if (prefix is None): prefix = '' bound_namespace = self.store.namespace(prefix) if bound_namespace: bound_namespace = URIRef(bound_namespace) if (bound_namespace and (bound_namespace != namespace)): if replace: self.store.bind(prefix, namespace) else: warnings.warn('Prefix ({}, {}) already defined, if want to replace it, set flag replace to True'.format((prefix if prefix else None), self.store.namespace(prefix))) else: bound_prefix = self.store.prefix(namespace) if (bound_prefix is None): self.store.bind(prefix, namespace) elif (bound_prefix == prefix): pass elif (override or bound_prefix.startswith('_')): self.store.bind(prefix, namespace)
bind a given namespace to the prefix, forbids same prefix with different namespace :param prefix: :param namespace: :param override: if override, rebind, even if the given namespace is already bound to another prefix. :param replace: if replace, replace any existing prefix with the new namespace
etk/knowledge_graph/namespacemanager.py
bind
donaq/etk
77
python
def bind(self, prefix: str, namespace: str, override=True, replace=True): '\n bind a given namespace to the prefix, forbids same prefix with different namespace\n\n :param prefix:\n :param namespace:\n :param override: if override, rebind, even if the given namespace is already bound to another prefix.\n :param replace: if replace, replace any existing prefix with the new namespace\n ' namespace = URIRef(str(namespace)) if (prefix is None): prefix = bound_namespace = self.store.namespace(prefix) if bound_namespace: bound_namespace = URIRef(bound_namespace) if (bound_namespace and (bound_namespace != namespace)): if replace: self.store.bind(prefix, namespace) else: warnings.warn('Prefix ({}, {}) already defined, if want to replace it, set flag replace to True'.format((prefix if prefix else None), self.store.namespace(prefix))) else: bound_prefix = self.store.prefix(namespace) if (bound_prefix is None): self.store.bind(prefix, namespace) elif (bound_prefix == prefix): pass elif (override or bound_prefix.startswith('_')): self.store.bind(prefix, namespace)
def bind(self, prefix: str, namespace: str, override=True, replace=True): '\n bind a given namespace to the prefix, forbids same prefix with different namespace\n\n :param prefix:\n :param namespace:\n :param override: if override, rebind, even if the given namespace is already bound to another prefix.\n :param replace: if replace, replace any existing prefix with the new namespace\n ' namespace = URIRef(str(namespace)) if (prefix is None): prefix = bound_namespace = self.store.namespace(prefix) if bound_namespace: bound_namespace = URIRef(bound_namespace) if (bound_namespace and (bound_namespace != namespace)): if replace: self.store.bind(prefix, namespace) else: warnings.warn('Prefix ({}, {}) already defined, if want to replace it, set flag replace to True'.format((prefix if prefix else None), self.store.namespace(prefix))) else: bound_prefix = self.store.prefix(namespace) if (bound_prefix is None): self.store.bind(prefix, namespace) elif (bound_prefix == prefix): pass elif (override or bound_prefix.startswith('_')): self.store.bind(prefix, namespace)<|docstring|>bind a given namespace to the prefix, forbids same prefix with different namespace :param prefix: :param namespace: :param override: if override, rebind, even if the given namespace is already bound to another prefix. :param replace: if replace, replace any existing prefix with the new namespace<|endoftext|>
acb1172bbcdaf5949eec00591b5e3bdbb1d03063c556878ac315badd6788c29a
@staticmethod def check_uriref(text: Union[(str, URI)]) -> Optional[URIRef]: '\n Check if the input text is likely to be an URIRef and return None or URIRef\n ' if isinstance(text, URIRef): return text if isinstance(text, URI): text = text.value if isinstance(text, str): text = text.strip() if URI_PATTERN.match(text.strip()): return URIRef(text)
Check if the input text is likely to be an URIRef and return None or URIRef
etk/knowledge_graph/namespacemanager.py
check_uriref
donaq/etk
77
python
@staticmethod def check_uriref(text: Union[(str, URI)]) -> Optional[URIRef]: '\n \n ' if isinstance(text, URIRef): return text if isinstance(text, URI): text = text.value if isinstance(text, str): text = text.strip() if URI_PATTERN.match(text.strip()): return URIRef(text)
@staticmethod def check_uriref(text: Union[(str, URI)]) -> Optional[URIRef]: '\n \n ' if isinstance(text, URIRef): return text if isinstance(text, URI): text = text.value if isinstance(text, str): text = text.strip() if URI_PATTERN.match(text.strip()): return URIRef(text)<|docstring|>Check if the input text is likely to be an URIRef and return None or URIRef<|endoftext|>
a8a2a7d7df4579affecd64f2cb8a088acb4bce6203ea91fcd9a6472dce0f4b91
def split_uri(self, uri: str): "\n Overwrite rdflib's implementation which has a lot of issues\n " ns = '' for (prefix, namespace) in self.store.namespaces(): if (uri.startswith(namespace) and (len(namespace) > len(ns))): ns = namespace if ns: return (ns, uri[len(ns):]) raise SplitURIWithUnknownPrefix()
Overwrite rdflib's implementation which has a lot of issues
etk/knowledge_graph/namespacemanager.py
split_uri
donaq/etk
77
python
def split_uri(self, uri: str): "\n \n " ns = for (prefix, namespace) in self.store.namespaces(): if (uri.startswith(namespace) and (len(namespace) > len(ns))): ns = namespace if ns: return (ns, uri[len(ns):]) raise SplitURIWithUnknownPrefix()
def split_uri(self, uri: str): "\n \n " ns = for (prefix, namespace) in self.store.namespaces(): if (uri.startswith(namespace) and (len(namespace) > len(ns))): ns = namespace if ns: return (ns, uri[len(ns):]) raise SplitURIWithUnknownPrefix()<|docstring|>Overwrite rdflib's implementation which has a lot of issues<|endoftext|>
f627345170e2a7627bc8170c16b217f372c904db7597f9c144e48e3801b8cee7
def bind_for_master_config(self): '\n Bind must-have namespaces for master config, note RDF and XSD are already bound\n ' self.bind('owl', OWL) self.bind('', 'http://isi.edu/default-ns/')
Bind must-have namespaces for master config, note RDF and XSD are already bound
etk/knowledge_graph/namespacemanager.py
bind_for_master_config
donaq/etk
77
python
def bind_for_master_config(self): '\n \n ' self.bind('owl', OWL) self.bind(, 'http://isi.edu/default-ns/')
def bind_for_master_config(self): '\n \n ' self.bind('owl', OWL) self.bind(, 'http://isi.edu/default-ns/')<|docstring|>Bind must-have namespaces for master config, note RDF and XSD are already bound<|endoftext|>
f0559919ab0948f1f12cbc0418f06edb6e84ee01f35f03a7dcaeecc5318e0e55
def __init__(self, tasks, ncores=None): '\n Schedule tasks to a defined number of processes.\n\n Parameters\n ----------\n tasks : list\n The list of tasks to execute. Tasks must be subclass of\n `multiprocessing.Process`.\n\n ncores : None or int\n The number of cores to use. If `None` is given uses the\n maximum number of CPUs allowed by\n `libs.libututil.parse_ncores` function.\n ' self.num_tasks = len(tasks) self.num_processes = ncores self.num_processes = min(self.num_processes, self.num_tasks) _n = self.num_processes job_list = [tasks[i::_n] for i in range(_n)] self.task_list = [Worker(jobs) for jobs in job_list] logger.info(f'{self.num_tasks} tasks ready.')
Schedule tasks to a defined number of processes. Parameters ---------- tasks : list The list of tasks to execute. Tasks must be subclass of `multiprocessing.Process`. ncores : None or int The number of cores to use. If `None` is given uses the maximum number of CPUs allowed by `libs.libututil.parse_ncores` function.
src/haddock/libs/libparallel.py
__init__
joaomcteixeira/haddock3
21
python
def __init__(self, tasks, ncores=None): '\n Schedule tasks to a defined number of processes.\n\n Parameters\n ----------\n tasks : list\n The list of tasks to execute. Tasks must be subclass of\n `multiprocessing.Process`.\n\n ncores : None or int\n The number of cores to use. If `None` is given uses the\n maximum number of CPUs allowed by\n `libs.libututil.parse_ncores` function.\n ' self.num_tasks = len(tasks) self.num_processes = ncores self.num_processes = min(self.num_processes, self.num_tasks) _n = self.num_processes job_list = [tasks[i::_n] for i in range(_n)] self.task_list = [Worker(jobs) for jobs in job_list] logger.info(f'{self.num_tasks} tasks ready.')
def __init__(self, tasks, ncores=None): '\n Schedule tasks to a defined number of processes.\n\n Parameters\n ----------\n tasks : list\n The list of tasks to execute. Tasks must be subclass of\n `multiprocessing.Process`.\n\n ncores : None or int\n The number of cores to use. If `None` is given uses the\n maximum number of CPUs allowed by\n `libs.libututil.parse_ncores` function.\n ' self.num_tasks = len(tasks) self.num_processes = ncores self.num_processes = min(self.num_processes, self.num_tasks) _n = self.num_processes job_list = [tasks[i::_n] for i in range(_n)] self.task_list = [Worker(jobs) for jobs in job_list] logger.info(f'{self.num_tasks} tasks ready.')<|docstring|>Schedule tasks to a defined number of processes. Parameters ---------- tasks : list The list of tasks to execute. Tasks must be subclass of `multiprocessing.Process`. ncores : None or int The number of cores to use. If `None` is given uses the maximum number of CPUs allowed by `libs.libututil.parse_ncores` function.<|endoftext|>
d248d63bd5d15608b015410c4e424b5aec79640c3438eff1b2794f9d89fb4afd
@plugin.register(chain='scrooge', requires=[]) def owner(today, **kwargs): '\n Updates Owners from CMDB\n ' new_owners = total = 0 for data in get_owners(): if update_owner(data, today): new_owners += 1 total += 1 return (True, '{} new owner(s), {} updated, {} total'.format(new_owners, (total - new_owners), total))
Updates Owners from CMDB
src/ralph_scrooge/plugins/collect/owner.py
owner
xliiv/ralph_pricing
0
python
@plugin.register(chain='scrooge', requires=[]) def owner(today, **kwargs): '\n \n ' new_owners = total = 0 for data in get_owners(): if update_owner(data, today): new_owners += 1 total += 1 return (True, '{} new owner(s), {} updated, {} total'.format(new_owners, (total - new_owners), total))
@plugin.register(chain='scrooge', requires=[]) def owner(today, **kwargs): '\n \n ' new_owners = total = 0 for data in get_owners(): if update_owner(data, today): new_owners += 1 total += 1 return (True, '{} new owner(s), {} updated, {} total'.format(new_owners, (total - new_owners), total))<|docstring|>Updates Owners from CMDB<|endoftext|>
0d2876114c14481c60c7dc05f7a5e9ce833a9cba7cf5c0b29919bb6219a1b037
def test_wallabag(self): '\n Test if the creation of the wallabag object looks fine\n ' t = self.create_triggerservice() d = self.create_wallabag(t) self.assertTrue(isinstance(d, Wallabag)) self.assertEqual(d.show(), ('My Wallabag %s' % d.url)) self.assertEqual(d.__str__(), ('%s' % d.url))
Test if the creation of the wallabag object looks fine
th_wallabag/tests.py
test_wallabag
luisriverag/django-th
1,069
python
def test_wallabag(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) self.assertTrue(isinstance(d, Wallabag)) self.assertEqual(d.show(), ('My Wallabag %s' % d.url)) self.assertEqual(d.__str__(), ('%s' % d.url))
def test_wallabag(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) self.assertTrue(isinstance(d, Wallabag)) self.assertEqual(d.show(), ('My Wallabag %s' % d.url)) self.assertEqual(d.__str__(), ('%s' % d.url))<|docstring|>Test if the creation of the wallabag object looks fine<|endoftext|>
c4bb10bea5567bd76d546f3a0bb02fe1945711576551c5fc0aa42832d37bb58d
def test_valid_provider_form(self): '\n test if that form is a valid provider one\n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagProviderForm(data=data) self.assertTrue(form.is_valid())
test if that form is a valid provider one
th_wallabag/tests.py
test_valid_provider_form
luisriverag/django-th
1,069
python
def test_valid_provider_form(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagProviderForm(data=data) self.assertTrue(form.is_valid())
def test_valid_provider_form(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagProviderForm(data=data) self.assertTrue(form.is_valid())<|docstring|>test if that form is a valid provider one<|endoftext|>
2f42b47c2def7856bef61f76e9df7952968a99f100663a575cb10eb6c0a68557
def test_valid_consumer_form(self): '\n test if that form is a valid consumer one\n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagConsumerForm(data=data) self.assertTrue(form.is_valid())
test if that form is a valid consumer one
th_wallabag/tests.py
test_valid_consumer_form
luisriverag/django-th
1,069
python
def test_valid_consumer_form(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagConsumerForm(data=data) self.assertTrue(form.is_valid())
def test_valid_consumer_form(self): '\n \n ' t = self.create_triggerservice() d = self.create_wallabag(t) data = {'url': d.url} form = WallabagConsumerForm(data=data) self.assertTrue(form.is_valid())<|docstring|>test if that form is a valid consumer one<|endoftext|>
b0a138dc521e7d732493b13ba4fb5b331cf8bf615779fe973c6756ddbe02df8b
def __init__(self, experiment_directory=None): "\n Base class for all experiments to derive from.\n\n Parameters\n ----------\n experiment_directory : str\n Specify a directory for the experiment. If it doesn't exist already, it'll be\n created along with 4 subfolders: 'Configurations', 'Logs , 'Weights' and 'Plots'.\n " self._experiment_directory = None self._step = None self._epoch = None self._config = {} self._meta_config = {'exclude_attrs_from_save': [], 'stateless_attributes': [], 'stateful_attributes': []} self._cache = {} self._argv = None self._default_dispatch = None self.experiment_directory = experiment_directory super(BaseExperiment, self).__init__()
Base class for all experiments to derive from. Parameters ---------- experiment_directory : str Specify a directory for the experiment. If it doesn't exist already, it'll be created along with 4 subfolders: 'Configurations', 'Logs , 'Weights' and 'Plots'.
speedrun/core.py
__init__
nasimrahaman/speedrun
76
python
def __init__(self, experiment_directory=None): "\n Base class for all experiments to derive from.\n\n Parameters\n ----------\n experiment_directory : str\n Specify a directory for the experiment. If it doesn't exist already, it'll be\n created along with 4 subfolders: 'Configurations', 'Logs , 'Weights' and 'Plots'.\n " self._experiment_directory = None self._step = None self._epoch = None self._config = {} self._meta_config = {'exclude_attrs_from_save': [], 'stateless_attributes': [], 'stateful_attributes': []} self._cache = {} self._argv = None self._default_dispatch = None self.experiment_directory = experiment_directory super(BaseExperiment, self).__init__()
def __init__(self, experiment_directory=None): "\n Base class for all experiments to derive from.\n\n Parameters\n ----------\n experiment_directory : str\n Specify a directory for the experiment. If it doesn't exist already, it'll be\n created along with 4 subfolders: 'Configurations', 'Logs , 'Weights' and 'Plots'.\n " self._experiment_directory = None self._step = None self._epoch = None self._config = {} self._meta_config = {'exclude_attrs_from_save': [], 'stateless_attributes': [], 'stateful_attributes': []} self._cache = {} self._argv = None self._default_dispatch = None self.experiment_directory = experiment_directory super(BaseExperiment, self).__init__()<|docstring|>Base class for all experiments to derive from. Parameters ---------- experiment_directory : str Specify a directory for the experiment. If it doesn't exist already, it'll be created along with 4 subfolders: 'Configurations', 'Logs , 'Weights' and 'Plots'.<|endoftext|>
7ee352922425d24a6dc86c2957713125fc4e2533d0b276c2a1fa5d90e7dfad30
@property def step(self): 'The current (global) step.' if (self._step is None): self._step = 0 return self._step
The current (global) step.
speedrun/core.py
step
nasimrahaman/speedrun
76
python
@property def step(self): if (self._step is None): self._step = 0 return self._step
@property def step(self): if (self._step is None): self._step = 0 return self._step<|docstring|>The current (global) step.<|endoftext|>
fbe37b7e04961f07be2a1c3876f45867202c9b295ffe944b9917a2ff62246e2d
def next_step(self): 'Increments the global step counter.' self._step = (0 if (self._step is None) else self._step) self._step += 1 return self
Increments the global step counter.
speedrun/core.py
next_step
nasimrahaman/speedrun
76
python
def next_step(self): self._step = (0 if (self._step is None) else self._step) self._step += 1 return self
def next_step(self): self._step = (0 if (self._step is None) else self._step) self._step += 1 return self<|docstring|>Increments the global step counter.<|endoftext|>
9becd4adbe6102a6f3c43e408cb19fb3af0f2fc57ef227953a2cc764ef88e297
@property def epoch(self): 'The current epoch.' if (self._epoch is None): self._epoch = 0 return self._epoch
The current epoch.
speedrun/core.py
epoch
nasimrahaman/speedrun
76
python
@property def epoch(self): if (self._epoch is None): self._epoch = 0 return self._epoch
@property def epoch(self): if (self._epoch is None): self._epoch = 0 return self._epoch<|docstring|>The current epoch.<|endoftext|>
4c0d6cd4beff9d6f6bb92ac802d1058b297692a914577e38b1958b3597a606c0
def next_epoch(self): 'Increments the epoch counter.' self._epoch = (0 if (self._epoch is None) else self._epoch) self._epoch += 1 return self
Increments the epoch counter.
speedrun/core.py
next_epoch
nasimrahaman/speedrun
76
python
def next_epoch(self): self._epoch = (0 if (self._epoch is None) else self._epoch) self._epoch += 1 return self
def next_epoch(self): self._epoch = (0 if (self._epoch is None) else self._epoch) self._epoch += 1 return self<|docstring|>Increments the epoch counter.<|endoftext|>
abe4433177d04c784c7926b86da8db6ed17c0fc6569042cad55728a5ecf7d138
@property def experiment_directory(self): 'Directory for the experiment.' return self._experiment_directory
Directory for the experiment.
speedrun/core.py
experiment_directory
nasimrahaman/speedrun
76
python
@property def experiment_directory(self): return self._experiment_directory
@property def experiment_directory(self): return self._experiment_directory<|docstring|>Directory for the experiment.<|endoftext|>
b1be3c52a25a212ab4d8ce2b55872b8894fb08152486359d8471aa188325c63f
@property def log_directory(self): 'Directory where the log files go.' if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Logs') else: return None
Directory where the log files go.
speedrun/core.py
log_directory
nasimrahaman/speedrun
76
python
@property def log_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Logs') else: return None
@property def log_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Logs') else: return None<|docstring|>Directory where the log files go.<|endoftext|>
4c385c4a644622ece5d7a34fa22a93d0ad095358900b7b55d954d5381cb33cbe
@property def checkpoint_directory(self): 'Directory where the checkpoints go.' if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Weights') else: return None
Directory where the checkpoints go.
speedrun/core.py
checkpoint_directory
nasimrahaman/speedrun
76
python
@property def checkpoint_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Weights') else: return None
@property def checkpoint_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Weights') else: return None<|docstring|>Directory where the checkpoints go.<|endoftext|>
beac09a3ecdafcc83d889c6ff664599094ebdc305b56d1c9ffd41fdbff6ad0e1
@property def plot_directory(self): 'Directory where the plots go.' if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Plots') else: return None
Directory where the plots go.
speedrun/core.py
plot_directory
nasimrahaman/speedrun
76
python
@property def plot_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Plots') else: return None
@property def plot_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Plots') else: return None<|docstring|>Directory where the plots go.<|endoftext|>
ebcf0204cb96b6be22aa4e0514b8c78462c4db381dc4f30b704895d5bc557e76
@property def configuration_directory(self): 'Directory where the configurations go.' if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Configurations') else: return None
Directory where the configurations go.
speedrun/core.py
configuration_directory
nasimrahaman/speedrun
76
python
@property def configuration_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Configurations') else: return None
@property def configuration_directory(self): if (self._experiment_directory is not None): return os.path.join(self._experiment_directory, 'Configurations') else: return None<|docstring|>Directory where the configurations go.<|endoftext|>
fed074ca7638d7385f723c1c38588fc5f4084bd6a3baa21fd53aa0998dc0e030
def inherit_configuration(self, from_experiment_directory, file_name='train_config.yml', read=True): '\n Given another experiment directory, inherit the configuration file by\n copying it over to the current configuration directory.\n\n Parameters\n ----------\n from_experiment_directory : str\n The other experiment directory to inherit from.\n file_name : str\n Name of the .yml configuration file.\n read : bool\n Whether to read the configuration file after copying it over.\n Returns\n -------\n BaseExperiment\n\n ' source_path = os.path.join(from_experiment_directory, 'Configurations', file_name) target_path = os.path.join(self.configuration_directory, file_name) shutil.copy(source_path, target_path) if read: self.read_config_file() return self
Given another experiment directory, inherit the configuration file by copying it over to the current configuration directory. Parameters ---------- from_experiment_directory : str The other experiment directory to inherit from. file_name : str Name of the .yml configuration file. read : bool Whether to read the configuration file after copying it over. Returns ------- BaseExperiment
speedrun/core.py
inherit_configuration
nasimrahaman/speedrun
76
python
def inherit_configuration(self, from_experiment_directory, file_name='train_config.yml', read=True): '\n Given another experiment directory, inherit the configuration file by\n copying it over to the current configuration directory.\n\n Parameters\n ----------\n from_experiment_directory : str\n The other experiment directory to inherit from.\n file_name : str\n Name of the .yml configuration file.\n read : bool\n Whether to read the configuration file after copying it over.\n Returns\n -------\n BaseExperiment\n\n ' source_path = os.path.join(from_experiment_directory, 'Configurations', file_name) target_path = os.path.join(self.configuration_directory, file_name) shutil.copy(source_path, target_path) if read: self.read_config_file() return self
def inherit_configuration(self, from_experiment_directory, file_name='train_config.yml', read=True): '\n Given another experiment directory, inherit the configuration file by\n copying it over to the current configuration directory.\n\n Parameters\n ----------\n from_experiment_directory : str\n The other experiment directory to inherit from.\n file_name : str\n Name of the .yml configuration file.\n read : bool\n Whether to read the configuration file after copying it over.\n Returns\n -------\n BaseExperiment\n\n ' source_path = os.path.join(from_experiment_directory, 'Configurations', file_name) target_path = os.path.join(self.configuration_directory, file_name) shutil.copy(source_path, target_path) if read: self.read_config_file() return self<|docstring|>Given another experiment directory, inherit the configuration file by copying it over to the current configuration directory. Parameters ---------- from_experiment_directory : str The other experiment directory to inherit from. file_name : str Name of the .yml configuration file. read : bool Whether to read the configuration file after copying it over. Returns ------- BaseExperiment<|endoftext|>
a80761ee4dcb55312aa849926f3437c32cf6b55af965a84f84ef90c20c816c88
def dump_configuration(self, file_name='train_config.yml'): '\n Dump current configuration (dictionary) to a file in the configuration directory\n of the current experiment.\n\n Parameters\n ----------\n file_name : str\n Name of the .yml file to dump to.\n\n Returns\n -------\n BaseExperiment\n ' dump_path = os.path.join(self.configuration_directory, file_name) with open(dump_path, 'w') as f: yaml.dump(self._config, f) return self
Dump current configuration (dictionary) to a file in the configuration directory of the current experiment. Parameters ---------- file_name : str Name of the .yml file to dump to. Returns ------- BaseExperiment
speedrun/core.py
dump_configuration
nasimrahaman/speedrun
76
python
def dump_configuration(self, file_name='train_config.yml'): '\n Dump current configuration (dictionary) to a file in the configuration directory\n of the current experiment.\n\n Parameters\n ----------\n file_name : str\n Name of the .yml file to dump to.\n\n Returns\n -------\n BaseExperiment\n ' dump_path = os.path.join(self.configuration_directory, file_name) with open(dump_path, 'w') as f: yaml.dump(self._config, f) return self
def dump_configuration(self, file_name='train_config.yml'): '\n Dump current configuration (dictionary) to a file in the configuration directory\n of the current experiment.\n\n Parameters\n ----------\n file_name : str\n Name of the .yml file to dump to.\n\n Returns\n -------\n BaseExperiment\n ' dump_path = os.path.join(self.configuration_directory, file_name) with open(dump_path, 'w') as f: yaml.dump(self._config, f) return self<|docstring|>Dump current configuration (dictionary) to a file in the configuration directory of the current experiment. Parameters ---------- file_name : str Name of the .yml file to dump to. Returns ------- BaseExperiment<|endoftext|>
04c2fdd03f8b145d308ada402ca2f4fdc66a8d1d54024d5b810c52de1a2277bf
def record_args(self): 'Record the command line args. This must be called before calling say `get_arg`.' self._argv = sys.argv return self
Record the command line args. This must be called before calling say `get_arg`.
speedrun/core.py
record_args
nasimrahaman/speedrun
76
python
def record_args(self): self._argv = sys.argv return self
def record_args(self): self._argv = sys.argv return self<|docstring|>Record the command line args. This must be called before calling say `get_arg`.<|endoftext|>
c3132908ddb98edbb557134e4ee29bfe524b85dc570aa21b77e229e8d4c382c7
def get_arg(self, tag, default=None, ensure_exists=False): "\n Get command line argument.\n\n Parameters\n ----------\n tag : str or int\n Command line argument name or index.\n default :\n Default value.\n ensure_exists :\n Raise an error if tag not found in command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --blah 42\n\n >>> experiment = BaseExperiment().record_args()\n >>> experiment.get_arg('blah') # Prints 42\n >>> assert isinstance(experiment.get_arg('blah'), int) # type parsing with ast\n >>> experiment.get_arg(0) # Prints './EXPERIMENT-0'\n " assert (self._argv is not None), 'Args not parsed yet. Have you called `self.record_args()`?' if (not isinstance(tag, str)): assert isinstance(tag, int) if ensure_exists: assert (tag < len(self._argv)), f'Accessing arg at index {tag}, but only {len(self._argv)} args available.' return (default if (tag >= len(self._argv)) else self._argv[tag]) if (f'--{tag}' in self._argv): value = self._argv[(self._argv.index(f'--{tag}') + 1)] try: value = ast.literal_eval(value) except (ValueError, SyntaxError): pass return value else: if ensure_exists: raise KeyError(f'Argument --{tag} is not provided, but it should be.') return default
Get command line argument. Parameters ---------- tag : str or int Command line argument name or index. default : Default value. ensure_exists : Raise an error if tag not found in command line arguments. Examples -------- In the terminal: $ python my_experiment.py ./EXPERIMENT-0 --blah 42 >>> experiment = BaseExperiment().record_args() >>> experiment.get_arg('blah') # Prints 42 >>> assert isinstance(experiment.get_arg('blah'), int) # type parsing with ast >>> experiment.get_arg(0) # Prints './EXPERIMENT-0'
speedrun/core.py
get_arg
nasimrahaman/speedrun
76
python
def get_arg(self, tag, default=None, ensure_exists=False): "\n Get command line argument.\n\n Parameters\n ----------\n tag : str or int\n Command line argument name or index.\n default :\n Default value.\n ensure_exists :\n Raise an error if tag not found in command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --blah 42\n\n >>> experiment = BaseExperiment().record_args()\n >>> experiment.get_arg('blah') # Prints 42\n >>> assert isinstance(experiment.get_arg('blah'), int) # type parsing with ast\n >>> experiment.get_arg(0) # Prints './EXPERIMENT-0'\n " assert (self._argv is not None), 'Args not parsed yet. Have you called `self.record_args()`?' if (not isinstance(tag, str)): assert isinstance(tag, int) if ensure_exists: assert (tag < len(self._argv)), f'Accessing arg at index {tag}, but only {len(self._argv)} args available.' return (default if (tag >= len(self._argv)) else self._argv[tag]) if (f'--{tag}' in self._argv): value = self._argv[(self._argv.index(f'--{tag}') + 1)] try: value = ast.literal_eval(value) except (ValueError, SyntaxError): pass return value else: if ensure_exists: raise KeyError(f'Argument --{tag} is not provided, but it should be.') return default
def get_arg(self, tag, default=None, ensure_exists=False): "\n Get command line argument.\n\n Parameters\n ----------\n tag : str or int\n Command line argument name or index.\n default :\n Default value.\n ensure_exists :\n Raise an error if tag not found in command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --blah 42\n\n >>> experiment = BaseExperiment().record_args()\n >>> experiment.get_arg('blah') # Prints 42\n >>> assert isinstance(experiment.get_arg('blah'), int) # type parsing with ast\n >>> experiment.get_arg(0) # Prints './EXPERIMENT-0'\n " assert (self._argv is not None), 'Args not parsed yet. Have you called `self.record_args()`?' if (not isinstance(tag, str)): assert isinstance(tag, int) if ensure_exists: assert (tag < len(self._argv)), f'Accessing arg at index {tag}, but only {len(self._argv)} args available.' return (default if (tag >= len(self._argv)) else self._argv[tag]) if (f'--{tag}' in self._argv): value = self._argv[(self._argv.index(f'--{tag}') + 1)] try: value = ast.literal_eval(value) except (ValueError, SyntaxError): pass return value else: if ensure_exists: raise KeyError(f'Argument --{tag} is not provided, but it should be.') return default<|docstring|>Get command line argument. Parameters ---------- tag : str or int Command line argument name or index. default : Default value. ensure_exists : Raise an error if tag not found in command line arguments. Examples -------- In the terminal: $ python my_experiment.py ./EXPERIMENT-0 --blah 42 >>> experiment = BaseExperiment().record_args() >>> experiment.get_arg('blah') # Prints 42 >>> assert isinstance(experiment.get_arg('blah'), int) # type parsing with ast >>> experiment.get_arg(0) # Prints './EXPERIMENT-0'<|endoftext|>
0ab61d89628938416915368d31159fa737f070e7a85b981708a86cd3d37fcae3
def update_configuration_from_args(self): "\n Override fields in the configuration file with command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --config.training.optimizer Adam\n --config.training.lr 0.0001\n\n >>> experiment = BaseExperiment().record_args().parse_experiment_directory()\n >>> experiment.read_config_file()\n >>> print(experiment.get('training/optimizer')) # Say this prints 'SGD'\n >>> experiment.update_configuration_from_args()\n >>> print(experiment.get('training/optimizer')) # This would print 'Adam'\n >>> print(experiment.get('training/lr')) # This would print 0.0001\n >>> assert isinstance(experiment.get('training/lr'), float) # Works\n\n Returns\n -------\n BaseExperiment\n\n " for arg in self._argv: if arg.startswith('--config.'): tag = arg.replace('--config.', '').replace('.', '/') value = self.get_arg(arg.lstrip('--'), ensure_exists=True) self.set(tag, value) return self
Override fields in the configuration file with command line arguments. Examples -------- In the terminal: $ python my_experiment.py ./EXPERIMENT-0 --config.training.optimizer Adam --config.training.lr 0.0001 >>> experiment = BaseExperiment().record_args().parse_experiment_directory() >>> experiment.read_config_file() >>> print(experiment.get('training/optimizer')) # Say this prints 'SGD' >>> experiment.update_configuration_from_args() >>> print(experiment.get('training/optimizer')) # This would print 'Adam' >>> print(experiment.get('training/lr')) # This would print 0.0001 >>> assert isinstance(experiment.get('training/lr'), float) # Works Returns ------- BaseExperiment
speedrun/core.py
update_configuration_from_args
nasimrahaman/speedrun
76
python
def update_configuration_from_args(self): "\n Override fields in the configuration file with command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --config.training.optimizer Adam\n --config.training.lr 0.0001\n\n >>> experiment = BaseExperiment().record_args().parse_experiment_directory()\n >>> experiment.read_config_file()\n >>> print(experiment.get('training/optimizer')) # Say this prints 'SGD'\n >>> experiment.update_configuration_from_args()\n >>> print(experiment.get('training/optimizer')) # This would print 'Adam'\n >>> print(experiment.get('training/lr')) # This would print 0.0001\n >>> assert isinstance(experiment.get('training/lr'), float) # Works\n\n Returns\n -------\n BaseExperiment\n\n " for arg in self._argv: if arg.startswith('--config.'): tag = arg.replace('--config.', ).replace('.', '/') value = self.get_arg(arg.lstrip('--'), ensure_exists=True) self.set(tag, value) return self
def update_configuration_from_args(self): "\n Override fields in the configuration file with command line arguments.\n\n Examples\n --------\n In the terminal:\n\n $ python my_experiment.py ./EXPERIMENT-0 --config.training.optimizer Adam\n --config.training.lr 0.0001\n\n >>> experiment = BaseExperiment().record_args().parse_experiment_directory()\n >>> experiment.read_config_file()\n >>> print(experiment.get('training/optimizer')) # Say this prints 'SGD'\n >>> experiment.update_configuration_from_args()\n >>> print(experiment.get('training/optimizer')) # This would print 'Adam'\n >>> print(experiment.get('training/lr')) # This would print 0.0001\n >>> assert isinstance(experiment.get('training/lr'), float) # Works\n\n Returns\n -------\n BaseExperiment\n\n " for arg in self._argv: if arg.startswith('--config.'): tag = arg.replace('--config.', ).replace('.', '/') value = self.get_arg(arg.lstrip('--'), ensure_exists=True) self.set(tag, value) return self<|docstring|>Override fields in the configuration file with command line arguments. Examples -------- In the terminal: $ python my_experiment.py ./EXPERIMENT-0 --config.training.optimizer Adam --config.training.lr 0.0001 >>> experiment = BaseExperiment().record_args().parse_experiment_directory() >>> experiment.read_config_file() >>> print(experiment.get('training/optimizer')) # Say this prints 'SGD' >>> experiment.update_configuration_from_args() >>> print(experiment.get('training/optimizer')) # This would print 'Adam' >>> print(experiment.get('training/lr')) # This would print 0.0001 >>> assert isinstance(experiment.get('training/lr'), float) # Works Returns ------- BaseExperiment<|endoftext|>
ca1ee856cde92c05b5e0054498d4d6b8fe9f8c15f4211cb7b28ed2a36222e00e
def register_unpickleable(self, *attributes): '\n Specify the attributes that are not pickleable. If the experiment contains\n unregistered unpickleable attributes, `BaseExperiment.checkpoint` might throw an error\n if not overloaded.\n ' self._meta_config['exclude_attrs_from_save'].extend(list(attributes)) return self
Specify the attributes that are not pickleable. If the experiment contains unregistered unpickleable attributes, `BaseExperiment.checkpoint` might throw an error if not overloaded.
speedrun/core.py
register_unpickleable
nasimrahaman/speedrun
76
python
def register_unpickleable(self, *attributes): '\n Specify the attributes that are not pickleable. If the experiment contains\n unregistered unpickleable attributes, `BaseExperiment.checkpoint` might throw an error\n if not overloaded.\n ' self._meta_config['exclude_attrs_from_save'].extend(list(attributes)) return self
def register_unpickleable(self, *attributes): '\n Specify the attributes that are not pickleable. If the experiment contains\n unregistered unpickleable attributes, `BaseExperiment.checkpoint` might throw an error\n if not overloaded.\n ' self._meta_config['exclude_attrs_from_save'].extend(list(attributes)) return self<|docstring|>Specify the attributes that are not pickleable. If the experiment contains unregistered unpickleable attributes, `BaseExperiment.checkpoint` might throw an error if not overloaded.<|endoftext|>
417c79e411cf977fbf2cc821ce4d613d39bfca5fb5f0951d622cfd892b272d13
def checkpoint(self, force=True): "\n Makes a checkpoint by dumping all experiment attributes to a pickle file.\n If force=False, this would only save if the global step and the save frequency\n set in the config file (under 'training/checkpoint_every') match.\n\n Warnings\n --------\n\n If your experiment has unpickleable objects, make sure to register them with\n `self.register_unpickleable` to not have them pickled.\n\n Parameters\n ----------\n force : bool\n If set to false, a checkpoint will be created only if global step and the\n save frequency set in the config file (under 'training/checkpoint_every') match.\n\n Returns\n -------\n BaseExperiment\n " if force: do_checkpoint = True else: do_checkpoint = ((self.step % self.get('training/checkpoint_every')) == 0) if do_checkpoint: self_dict = {key: val for (key, val) in self.__dict__.items() if (key not in self._meta_config['exclude_attrs_from_save'])} save(self_dict, os.path.join(self.checkpoint_directory, f'checkpoint_iter_{self.step}.pt')) return self
Makes a checkpoint by dumping all experiment attributes to a pickle file. If force=False, this would only save if the global step and the save frequency set in the config file (under 'training/checkpoint_every') match. Warnings -------- If your experiment has unpickleable objects, make sure to register them with `self.register_unpickleable` to not have them pickled. Parameters ---------- force : bool If set to false, a checkpoint will be created only if global step and the save frequency set in the config file (under 'training/checkpoint_every') match. Returns ------- BaseExperiment
speedrun/core.py
checkpoint
nasimrahaman/speedrun
76
python
def checkpoint(self, force=True): "\n Makes a checkpoint by dumping all experiment attributes to a pickle file.\n If force=False, this would only save if the global step and the save frequency\n set in the config file (under 'training/checkpoint_every') match.\n\n Warnings\n --------\n\n If your experiment has unpickleable objects, make sure to register them with\n `self.register_unpickleable` to not have them pickled.\n\n Parameters\n ----------\n force : bool\n If set to false, a checkpoint will be created only if global step and the\n save frequency set in the config file (under 'training/checkpoint_every') match.\n\n Returns\n -------\n BaseExperiment\n " if force: do_checkpoint = True else: do_checkpoint = ((self.step % self.get('training/checkpoint_every')) == 0) if do_checkpoint: self_dict = {key: val for (key, val) in self.__dict__.items() if (key not in self._meta_config['exclude_attrs_from_save'])} save(self_dict, os.path.join(self.checkpoint_directory, f'checkpoint_iter_{self.step}.pt')) return self
def checkpoint(self, force=True): "\n Makes a checkpoint by dumping all experiment attributes to a pickle file.\n If force=False, this would only save if the global step and the save frequency\n set in the config file (under 'training/checkpoint_every') match.\n\n Warnings\n --------\n\n If your experiment has unpickleable objects, make sure to register them with\n `self.register_unpickleable` to not have them pickled.\n\n Parameters\n ----------\n force : bool\n If set to false, a checkpoint will be created only if global step and the\n save frequency set in the config file (under 'training/checkpoint_every') match.\n\n Returns\n -------\n BaseExperiment\n " if force: do_checkpoint = True else: do_checkpoint = ((self.step % self.get('training/checkpoint_every')) == 0) if do_checkpoint: self_dict = {key: val for (key, val) in self.__dict__.items() if (key not in self._meta_config['exclude_attrs_from_save'])} save(self_dict, os.path.join(self.checkpoint_directory, f'checkpoint_iter_{self.step}.pt')) return self<|docstring|>Makes a checkpoint by dumping all experiment attributes to a pickle file. If force=False, this would only save if the global step and the save frequency set in the config file (under 'training/checkpoint_every') match. Warnings -------- If your experiment has unpickleable objects, make sure to register them with `self.register_unpickleable` to not have them pickled. Parameters ---------- force : bool If set to false, a checkpoint will be created only if global step and the save frequency set in the config file (under 'training/checkpoint_every') match. Returns ------- BaseExperiment<|endoftext|>
3a085ae1decc83a3e382530834ced62584af4e7ef09e20832a492baabb45f087
def load_from_checkpoint(self, step=None): '\n Load checkpoint from file. Note that attributes registered as unpickleable are not\n pickled, and will not be loaded.\n\n Parameters\n ----------\n step : int\n Load checkpoint made at step.\n\n Returns\n -------\n BaseExperiment\n ' for filename in os.listdir(self.checkpoint_directory): if (filename.startswith('checkpoint_iter_') and filename.endswith('.pt')): try: ckpt_step = int(filename.strip('checkpoint_iter_.pt')) except ValueError: continue if (ckpt_step == step): self_dict = load(filename) self.__dict__.update(self_dict) break else: raise FileNotFoundError(f'No checkpoint for step {step} found in {self.checkpoint_directory}.') return self
Load checkpoint from file. Note that attributes registered as unpickleable are not pickled, and will not be loaded. Parameters ---------- step : int Load checkpoint made at step. Returns ------- BaseExperiment
speedrun/core.py
load_from_checkpoint
nasimrahaman/speedrun
76
python
def load_from_checkpoint(self, step=None): '\n Load checkpoint from file. Note that attributes registered as unpickleable are not\n pickled, and will not be loaded.\n\n Parameters\n ----------\n step : int\n Load checkpoint made at step.\n\n Returns\n -------\n BaseExperiment\n ' for filename in os.listdir(self.checkpoint_directory): if (filename.startswith('checkpoint_iter_') and filename.endswith('.pt')): try: ckpt_step = int(filename.strip('checkpoint_iter_.pt')) except ValueError: continue if (ckpt_step == step): self_dict = load(filename) self.__dict__.update(self_dict) break else: raise FileNotFoundError(f'No checkpoint for step {step} found in {self.checkpoint_directory}.') return self
def load_from_checkpoint(self, step=None): '\n Load checkpoint from file. Note that attributes registered as unpickleable are not\n pickled, and will not be loaded.\n\n Parameters\n ----------\n step : int\n Load checkpoint made at step.\n\n Returns\n -------\n BaseExperiment\n ' for filename in os.listdir(self.checkpoint_directory): if (filename.startswith('checkpoint_iter_') and filename.endswith('.pt')): try: ckpt_step = int(filename.strip('checkpoint_iter_.pt')) except ValueError: continue if (ckpt_step == step): self_dict = load(filename) self.__dict__.update(self_dict) break else: raise FileNotFoundError(f'No checkpoint for step {step} found in {self.checkpoint_directory}.') return self<|docstring|>Load checkpoint from file. Note that attributes registered as unpickleable are not pickled, and will not be loaded. Parameters ---------- step : int Load checkpoint made at step. Returns ------- BaseExperiment<|endoftext|>
201211dd45b7fc4b382ce5ec1d6564fc6dad66ec9e2011fb361c1b540959f7a4
def get(self, tag, default=None, ensure_exists=False):
    """
    Retrieves a field from the configuration.

    Examples
    --------
    Say the configuration file reads:

    ```yaml
    my_field:
      my_subfield: 12
      subsubfields:
        my_subsubfield: 42
    my_new_field: 0
    ```

    >>> experiment = BaseExperiment().parse_experiment_directory().read_config_file()
    >>> print(experiment.get('my_field/my_subfield'))  # Prints 12
    >>> print(experiment.get('my_field/subsubfields/my_subsubfield'))  # Prints 42
    >>> print(experiment.get('my_new_field'))  # Prints 0
    >>> print(experiment.get('i_dont_exist', 13))  # Prints 13
    >>> print(experiment.get('i_should_exist', ensure_exists=True))  # Raises an error

    Parameters
    ----------
    tag : str
        Path in the hierarchical configuration (see example).
    default :
        Default value if object corresponding to path not found.
    ensure_exists : bool
        Whether an error should be raised if the path doesn't exist.
    """
    keys = tag.split('/')
    node = self._config
    for key in keys:
        if ensure_exists:
            assert key in node
        # NOTE(review): leafness is decided by *value* equality with the last
        # path component, so a repeated key name (e.g. 'a/a') treats the first
        # occurrence as a leaf as well — behavior preserved from the original.
        is_leaf = key == keys[-1]
        node = node.get(key, default if is_leaf else {})
    return node
Retrieves a field from the configuration. Examples -------- Say the configuration file reads: ```yaml my_field: my_subfield: 12 subsubfields: my_subsubfield: 42 my_new_field: 0 ``` >>> experiment = BaseExperiment().parse_experiment_directory().read_config_file() >>> print(experiment.get('my_field/my_subfield')) # Prints 12 >>> print(experiment.get('my_field/subsubfields/my_subsubfield')) # Prints 42 >>> print(experiment.get('my_new_field')) # Prints 0 >>> print(experiment.get('i_dont_exist', 13)) # Prints 13 >>> print(experiment.get('i_should_exist', ensure_exists=True)) # Raises an error Parameters ---------- tag : str Path in the hierarchical configuration (see example). default : Default value if object corresponding to path not found. ensure_exists : bool Whether an error should be raised if the path doesn't exist.
speedrun/core.py
get
nasimrahaman/speedrun
76
python
def get(self, tag, default=None, ensure_exists=False): "\n Retrieves a field from the configuration.\n\n Examples\n --------\n Say the configuration file reads:\n\n ```yaml\n my_field:\n my_subfield: 12\n subsubfields:\n my_subsubfield: 42\n my_new_field: 0\n ```\n\n >>> experiment = BaseExperiment().parse_experiment_directory().read_config_file()\n >>> print(experiment.get('my_field/my_subfield')) # Prints 12\n >>> print(experiment.get('my_field/subsubfields/my_subsubfield')) # Prints 42\n >>> print(experiment.get('my_new_field')) # Prints 0\n >>> print(experiment.get('i_dont_exist', 13)) # Prints 13\n >>> print(experiment.get('i_should_exist', ensure_exists=True)) # Raises an error\n\n Parameters\n ----------\n tag : str\n Path in the hierarchical configuration (see example).\n default :\n Default value if object corresponding to path not found.\n ensure_exists : bool\n Whether an error should be raised if the path doesn't exist.\n " paths = tag.split('/') data = self._config for path in paths: if ensure_exists: assert (path in data) data = data.get(path, (default if (path == paths[(- 1)]) else {})) return data
def get(self, tag, default=None, ensure_exists=False): "\n Retrieves a field from the configuration.\n\n Examples\n --------\n Say the configuration file reads:\n\n ```yaml\n my_field:\n my_subfield: 12\n subsubfields:\n my_subsubfield: 42\n my_new_field: 0\n ```\n\n >>> experiment = BaseExperiment().parse_experiment_directory().read_config_file()\n >>> print(experiment.get('my_field/my_subfield')) # Prints 12\n >>> print(experiment.get('my_field/subsubfields/my_subsubfield')) # Prints 42\n >>> print(experiment.get('my_new_field')) # Prints 0\n >>> print(experiment.get('i_dont_exist', 13)) # Prints 13\n >>> print(experiment.get('i_should_exist', ensure_exists=True)) # Raises an error\n\n Parameters\n ----------\n tag : str\n Path in the hierarchical configuration (see example).\n default :\n Default value if object corresponding to path not found.\n ensure_exists : bool\n Whether an error should be raised if the path doesn't exist.\n " paths = tag.split('/') data = self._config for path in paths: if ensure_exists: assert (path in data) data = data.get(path, (default if (path == paths[(- 1)]) else {})) return data<|docstring|>Retrieves a field from the configuration. Examples -------- Say the configuration file reads: ```yaml my_field: my_subfield: 12 subsubfields: my_subsubfield: 42 my_new_field: 0 ``` >>> experiment = BaseExperiment().parse_experiment_directory().read_config_file() >>> print(experiment.get('my_field/my_subfield')) # Prints 12 >>> print(experiment.get('my_field/subsubfields/my_subsubfield')) # Prints 42 >>> print(experiment.get('my_new_field')) # Prints 0 >>> print(experiment.get('i_dont_exist', 13)) # Prints 13 >>> print(experiment.get('i_should_exist', ensure_exists=True)) # Raises an error Parameters ---------- tag : str Path in the hierarchical configuration (see example). default : Default value if object corresponding to path not found. ensure_exists : bool Whether an error should be raised if the path doesn't exist.<|endoftext|>
f35c705ad2d16c9b831ffdb947c2032f409c33798662692a57eb7bb5185144d2
def set(self, tag, value):
    """
    Like get, but sets.

    Examples
    --------
    >>> experiment = BaseExperiment()
    >>> experiment.set('a/b', 42)
    >>> print(experiment.get('a/b'))  # Prints 42

    Parameters
    ----------
    tag : str
        Path in the hierarchical configuration.
    value :
        Value to set.

    Returns
    -------
    BaseExperiment
    """
    keys = tag.split('/')
    node = self._config
    # Walk down to the parent of the leaf, creating empty dicts along the way.
    for key in keys[:-1]:
        if key not in node:
            node[key] = {}
        node = node[key]
    node[keys[-1]] = value
    return self
Like get, but sets. Examples -------- >>> experiment = BaseExperiment() >>> experiment.set('a/b', 42) >>> print(experiment.get('a/b')) # Prints 42 Parameters ---------- tag : str Path in the hierarchical configuration. value : Value to set. Returns ------- BaseExperiment
speedrun/core.py
set
nasimrahaman/speedrun
76
python
def set(self, tag, value): "\n Like get, but sets.\n\n Examples\n --------\n >>> experiment = BaseExperiment()\n >>> experiment.set('a/b', 42)\n >>> print(experiment.get('a/b')) # Prints 42\n\n Parameters\n ----------\n tag : str\n Path in the hierarchical configuration.\n value :\n Value to set.\n\n Returns\n -------\n BaseExperiment\n " paths = tag.split('/') data = self._config for path in paths[:(- 1)]: if (path in data): data = data[path] else: data.update({path: {}}) data = data[path] data[paths[(- 1)]] = value return self
def set(self, tag, value): "\n Like get, but sets.\n\n Examples\n --------\n >>> experiment = BaseExperiment()\n >>> experiment.set('a/b', 42)\n >>> print(experiment.get('a/b')) # Prints 42\n\n Parameters\n ----------\n tag : str\n Path in the hierarchical configuration.\n value :\n Value to set.\n\n Returns\n -------\n BaseExperiment\n " paths = tag.split('/') data = self._config for path in paths[:(- 1)]: if (path in data): data = data[path] else: data.update({path: {}}) data = data[path] data[paths[(- 1)]] = value return self<|docstring|>Like get, but sets. Examples -------- >>> experiment = BaseExperiment() >>> experiment.set('a/b', 42) >>> print(experiment.get('a/b')) # Prints 42 Parameters ---------- tag : str Path in the hierarchical configuration. value : Value to set. Returns ------- BaseExperiment<|endoftext|>
fd41f7f042d8a1fff28a3b582ddaf1b457e091f6ee1c001008da8e5e072c22e5
@property
def cache_keys(self):
    """Names of all tags currently stored in the cache, as a list."""
    return [key for key in self._cache]
List keys in the cache.
speedrun/core.py
cache_keys
nasimrahaman/speedrun
76
python
@property def cache_keys(self): '\n \n ' return list(self._cache.keys())
@property def cache_keys(self): '\n \n ' return list(self._cache.keys())<|docstring|>List keys in the cache.<|endoftext|>
f8af21d2800e817ea65180622b74eb9534f4f1e09bfba994c4241d263ebfb25f
def read_from_cache(self, tag, default=None, ensure_exists=False):
    """
    Read from the cache.

    Parameters
    ----------
    tag : str
        Tag to read.
    default :
        Default value, returned when `tag` is not in the cache.
    ensure_exists :
        Raises an error (AssertionError) if tag is not found in cache.

    Returns
    -------
    Cache contents.
    """
    if ensure_exists:
        assert tag in self._cache
    value = self._cache.get(tag, default)
    return value
Read from the cache. Parameters ---------- tag : str Tag to read. default : Default value. ensure_exists : Raises an error if tag is not found in cache. Returns ------- Cache contents.
speedrun/core.py
read_from_cache
nasimrahaman/speedrun
76
python
def read_from_cache(self, tag, default=None, ensure_exists=False): '\n Read from the cache.\n\n Parameters\n ----------\n tag : str\n Tag to read.\n default :\n Default value.\n ensure_exists :\n Raises an error if tag is not found in cache.\n\n Returns\n -------\n Cache contents.\n ' if ensure_exists: assert (tag in self._cache) return self._cache.get(tag, default)
def read_from_cache(self, tag, default=None, ensure_exists=False): '\n Read from the cache.\n\n Parameters\n ----------\n tag : str\n Tag to read.\n default :\n Default value.\n ensure_exists :\n Raises an error if tag is not found in cache.\n\n Returns\n -------\n Cache contents.\n ' if ensure_exists: assert (tag in self._cache) return self._cache.get(tag, default)<|docstring|>Read from the cache. Parameters ---------- tag : str Tag to read. default : Default value. ensure_exists : Raises an error if tag is not found in cache. Returns ------- Cache contents.<|endoftext|>
4d0590f2efaae541b2e242f56e45491a33d9736aab3efcc1aa64c53e368595c1
def write_to_cache(self, tag, value):
    """
    Write a value to cache.

    Parameters
    ----------
    tag : str
        Tag to write.
    value :
        Value to write.

    Returns
    -------
    BaseExperiment

    Examples
    --------
    >>> experiment = BaseExperiment()
    >>> experiment.write_to_cache('blah', 42)
    >>> experiment.read_from_cache('blah')  # Prints 42
    """
    self._cache[tag] = value
    return self
Write a value to cache. Parameters ---------- tag : str Tag to write. value : Value to write Returns ------- BaseExperiment Examples -------- >>> experiment = BaseExperiment() >>> experiment.write_to_cache('blah', 42) >>> experiment.read_from_cache('blah') # Prints 42
speedrun/core.py
write_to_cache
nasimrahaman/speedrun
76
python
def write_to_cache(self, tag, value): "\n Write a value to cache.\n\n Parameters\n ----------\n tag : str\n Tag to write.\n value :\n Value to write\n\n Returns\n -------\n BaseExperiment\n\n Examples\n --------\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('blah', 42)\n >>> experiment.read_from_cache('blah') # Prints 42\n " self._cache.update({tag: value}) return self
def write_to_cache(self, tag, value): "\n Write a value to cache.\n\n Parameters\n ----------\n tag : str\n Tag to write.\n value :\n Value to write\n\n Returns\n -------\n BaseExperiment\n\n Examples\n --------\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('blah', 42)\n >>> experiment.read_from_cache('blah') # Prints 42\n " self._cache.update({tag: value}) return self<|docstring|>Write a value to cache. Parameters ---------- tag : str Tag to write. value : Value to write Returns ------- BaseExperiment Examples -------- >>> experiment = BaseExperiment() >>> experiment.write_to_cache('blah', 42) >>> experiment.read_from_cache('blah') # Prints 42<|endoftext|>
db7da350ac9153c79b605a7eeb6668163afecbc856784bd5b53c58885e47fd6e
def accumulate_in_cache(self, tag, value, accumulate_fn=None):
    """
    Accumulate to an object in cache.

    Parameters
    ----------
    tag : str
        Tag to accumulate to.
    value :
        Value to accumulate.
    accumulate_fn : callable
        Accumulator function. Defaults to the increment operator, +=.

    Examples
    -------
    Simple:
    >>> experiment = BaseExperiment()
    >>> experiment.write_to_cache('loss', 2)
    >>> experiment.accumulate_in_cache('loss', 1).read_from_cache('loss')  # Prints 3
    >>> experiment.accumulate_in_cache('loss', 2).read_from_cache('loss')  # Prints 5

    With an accumulator function:
    >>> experiment = BaseExperiment()
    >>> experiment.write_to_cache('a', {})
    >>> experiment.accumulate_in_cache('a', 3, accumulate_fn=lambda x, y: x.update({y: 'la'}))
    >>> experiment.read_from_cache('a')[3]  # Prints 'la'

    Returns
    -------
    BaseExperiment
    """
    if tag not in self._cache:
        # Nothing to accumulate onto yet: this is a plain first write.
        return self.write_to_cache(tag, value)
    if accumulate_fn is None:
        # In-place add on purpose: mutable values (e.g. lists) are extended in place.
        self._cache[tag] += value
    else:
        assert callable(accumulate_fn)
        self._cache[tag] = accumulate_fn(self._cache[tag], value)
    return self
Accumulate to an object in cache. Parameters ---------- tag : str Tag to accumulate to. value : Value to accumulate. accumulate_fn : callable Accumulator function. Defaults to the increment operator, +=. Examples ------- Simple: >>> experiment = BaseExperiment() >>> experiment.write_to_cache('loss', 2) >>> experiment.accumulate_in_cache('loss', 1).read_from_cache('loss') # Prints 3 >>> experiment.accumulate_in_cache('loss', 2).read_from_cache('loss') # Prints 5 With an accumulator function: >>> experiment = BaseExperiment() >>> experiment.write_to_cache('a', {}) >>> experiment.accumulate_in_cache('a', 3, accumulate_fn=lambda x, y: x.update({y: 'la'})) >>> experiment.read_from_cache('a')[3] # Prints 'la' Returns ------- BaseExperiment
speedrun/core.py
accumulate_in_cache
nasimrahaman/speedrun
76
python
def accumulate_in_cache(self, tag, value, accumulate_fn=None): "\n Accumulate to an object in cache.\n\n Parameters\n ----------\n tag : str\n Tag to accumulate to.\n value :\n Value to accumulate.\n accumulate_fn : callable\n Accumulator function. Defaults to the increment operator, +=.\n\n Examples\n -------\n Simple:\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('loss', 2)\n >>> experiment.accumulate_in_cache('loss', 1).read_from_cache('loss') # Prints 3\n >>> experiment.accumulate_in_cache('loss', 2).read_from_cache('loss') # Prints 5\n\n With an accumulator function:\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('a', {})\n >>> experiment.accumulate_in_cache('a', 3, accumulate_fn=lambda x, y: x.update({y: 'la'}))\n >>> experiment.read_from_cache('a')[3] # Prints 'la'\n\n Returns\n -------\n BaseExperiment\n " if (tag not in self._cache): self.write_to_cache(tag, value) elif (accumulate_fn is None): self._cache[tag] += value else: assert callable(accumulate_fn) self._cache[tag] = accumulate_fn(self._cache[tag], value) return self
def accumulate_in_cache(self, tag, value, accumulate_fn=None): "\n Accumulate to an object in cache.\n\n Parameters\n ----------\n tag : str\n Tag to accumulate to.\n value :\n Value to accumulate.\n accumulate_fn : callable\n Accumulator function. Defaults to the increment operator, +=.\n\n Examples\n -------\n Simple:\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('loss', 2)\n >>> experiment.accumulate_in_cache('loss', 1).read_from_cache('loss') # Prints 3\n >>> experiment.accumulate_in_cache('loss', 2).read_from_cache('loss') # Prints 5\n\n With an accumulator function:\n >>> experiment = BaseExperiment()\n >>> experiment.write_to_cache('a', {})\n >>> experiment.accumulate_in_cache('a', 3, accumulate_fn=lambda x, y: x.update({y: 'la'}))\n >>> experiment.read_from_cache('a')[3] # Prints 'la'\n\n Returns\n -------\n BaseExperiment\n " if (tag not in self._cache): self.write_to_cache(tag, value) elif (accumulate_fn is None): self._cache[tag] += value else: assert callable(accumulate_fn) self._cache[tag] = accumulate_fn(self._cache[tag], value) return self<|docstring|>Accumulate to an object in cache. Parameters ---------- tag : str Tag to accumulate to. value : Value to accumulate. accumulate_fn : callable Accumulator function. Defaults to the increment operator, +=. Examples ------- Simple: >>> experiment = BaseExperiment() >>> experiment.write_to_cache('loss', 2) >>> experiment.accumulate_in_cache('loss', 1).read_from_cache('loss') # Prints 3 >>> experiment.accumulate_in_cache('loss', 2).read_from_cache('loss') # Prints 5 With an accumulator function: >>> experiment = BaseExperiment() >>> experiment.write_to_cache('a', {}) >>> experiment.accumulate_in_cache('a', 3, accumulate_fn=lambda x, y: x.update({y: 'la'})) >>> experiment.read_from_cache('a')[3] # Prints 'la' Returns ------- BaseExperiment<|endoftext|>
20156ba07ebf861eebf6aea551c66bb5fdf58a39df33a7b5f6bb8b18c780cff8
def clear_in_cache(self, tag):
    """Remove `tag` from cache; a missing tag is silently ignored."""
    self._cache.pop(tag, None)
    return self
Remove `tag` from cache.
speedrun/core.py
clear_in_cache
nasimrahaman/speedrun
76
python
def clear_in_cache(self, tag): if (tag not in self._cache): pass else: del self._cache[tag] return self
def clear_in_cache(self, tag): if (tag not in self._cache): pass else: del self._cache[tag] return self<|docstring|>Remove `tag` from cache.<|endoftext|>
73363a9cc14a45b83b36566f9487edcba7d006988dc11c38a91b4fb415b29f15
def clear_cache(self):
    """Delete everything in cache. The cache dict object itself is kept."""
    while self._cache:
        self._cache.popitem()
    return self
Delete everything in cache.
speedrun/core.py
clear_cache
nasimrahaman/speedrun
76
python
def clear_cache(self): self._cache.clear() return self
def clear_cache(self): self._cache.clear() return self<|docstring|>Delete everything in cache.<|endoftext|>
b62b608393eaae23796e40f5243e5429130e638314ac4418a4be995ae85c526c
def bundle(self, **kwargs):
    """Pack the given keyword arguments into a Namespace object and return it."""
    namespace = Namespace(**kwargs)
    return namespace
Pack kwargs to a Namespace object.
speedrun/core.py
bundle
nasimrahaman/speedrun
76
python
def bundle(self, **kwargs): return Namespace(**kwargs)
def bundle(self, **kwargs): return Namespace(**kwargs)<|docstring|>Pack kwargs to a Namespace object.<|endoftext|>
78b8485a3c28000ebfd8560d1be44c67b5628e92f32c8d23725bc96935cb23b6
def read_config_file(self, file_name='train_config.yml', path=None):
    """
    Read configuration from a YAML file.

    Parameters
    ----------
    file_name : str
        Name of the file. Defaults to `train_config.yml`.
    path : str
        Path to file. Defaults to 'experiment_directory/Configurations/file_name'.

    Returns
    -------
    BaseExperiment

    Raises
    ------
    FileNotFoundError
        If the configuration file does not exist at the resolved path.
    """
    if path is None:
        path = os.path.join(self.configuration_directory, file_name)
    if not os.path.exists(path):
        # Include the offending path in the error; the original raised a bare
        # FileNotFoundError with no message, which is hard to debug.
        raise FileNotFoundError(f"Configuration file not found at: {path}")
    with open(path, 'r') as f:
        # FullLoader supports the full YAML tag set used by speedrun configs.
        self._config = yaml.load(f, Loader=yaml.FullLoader)
    return self
Read configuration from a YAML file. Parameters ---------- file_name : str Name of the file. Defaults to `train_config.yml`. path : str Path to file. Defaults to 'experiment_directory/Configurations/file_name'. Returns ------- BaseExperiment
speedrun/core.py
read_config_file
nasimrahaman/speedrun
76
python
def read_config_file(self, file_name='train_config.yml', path=None): "\n Read configuration from a YAML file.\n\n Parameters\n ----------\n file_name : str\n Name of the file. Defaults to `train_config.yml`.\n path : str\n Path to file. Defaults to 'experiment_directory/Configurations/file_name'.\n\n Returns\n -------\n BaseExperiment\n " path = (os.path.join(self.configuration_directory, file_name) if (path is None) else path) if (not os.path.exists(path)): raise FileNotFoundError with open(path, 'r') as f: self._config = yaml.load(f, Loader=yaml.FullLoader) return self
def read_config_file(self, file_name='train_config.yml', path=None): "\n Read configuration from a YAML file.\n\n Parameters\n ----------\n file_name : str\n Name of the file. Defaults to `train_config.yml`.\n path : str\n Path to file. Defaults to 'experiment_directory/Configurations/file_name'.\n\n Returns\n -------\n BaseExperiment\n " path = (os.path.join(self.configuration_directory, file_name) if (path is None) else path) if (not os.path.exists(path)): raise FileNotFoundError with open(path, 'r') as f: self._config = yaml.load(f, Loader=yaml.FullLoader) return self<|docstring|>Read configuration from a YAML file. Parameters ---------- file_name : str Name of the file. Defaults to `train_config.yml`. path : str Path to file. Defaults to 'experiment_directory/Configurations/file_name'. Returns ------- BaseExperiment<|endoftext|>
7d4ba846c50e210f9ad456d184f56bc0ce261e4ee4f042bf269c858ac8171c2f
def read_macro(self, path=None):
    """
    If multiple experiments share the same set of command line args, it can be annoying
    to key them in manually every time. Macros is a secondary config file that can be shared
    between experiments. The main config is updated with the contents of the secondary config,
    which we call a _macro_.

    Parameters
    ----------
    path : str
        Path to the macro file. If None, it's read from the command line arg '--macro'.
        If that doesn't work, this function does nothing.

    Notes
    -----
    The `path` argument can either be a single path or a list of paths delimited by a colon.
    In other words, the following would work:
        $ python experiment.py ... --macro path/to/macro1.yml:path/to/macro2.yml

    Returns
    -------
    BaseExperiment
    """
    if path is None:
        path = self.get_arg('macro')
    if path is None:
        # No macro requested: still return self (the original returned None here,
        # breaking method chaining and the documented return contract).
        return self
    for _path in path.split(':'):
        with open(_path, 'r') as f:
            macro = yaml.load(f, Loader=yaml.FullLoader)
        # Merge the macro into the main config in place.
        MacroReader.update_dict(self._config, macro, copy=False)
    return self
If multiple experiments share the same set of command line args, it can be annoying to key them in manually every time. Macros is a secondary config file that can be shared between experiments. The main config is updated with the contents of the secondary config, which we call a _macro_. Parameters ---------- path : str Path to the macro file. If None, it's read from the command line arg '--macro'. If that doesn't work, this function does nothing. Notes ----- The `path` argument can either be a single path or a list of paths delimited by a colon. In other words, the following would work: $ python experiment.py ... --macro path/to/macro1.yml:path/to/macro2.yml Returns ------- BaseExperiment
speedrun/core.py
read_macro
nasimrahaman/speedrun
76
python
def read_macro(self, path=None): "\n If multiple experiments share the same set of command line args, it can be annoying\n to key them in manually every time. Macros is a secondary config file that can be shared\n between experiments. The main config is updated with the contents of the secondary config,\n which we call a _macro_.\n\n Parameters\n ----------\n path : str\n Path to the macro file. If None, it's read from the command line arg '--macro'.\n If that doesn't work, this function does nothing.\n\n Notes\n -----\n The `path` argument can either be a single path or a list of paths delimited by a colon.\n In other words, the following would work:\n $ python experiment.py ... --macro path/to/macro1.yml:path/to/macro2.yml\n\n Returns\n -------\n BaseExperiment\n " if (path is None): path = self.get_arg('macro') if (path is None): return for _path in path.split(':'): with open(_path, 'r') as f: macro = yaml.load(f, Loader=yaml.FullLoader) MacroReader.update_dict(self._config, macro, copy=False) return self
def read_macro(self, path=None): "\n If multiple experiments share the same set of command line args, it can be annoying\n to key them in manually every time. Macros is a secondary config file that can be shared\n between experiments. The main config is updated with the contents of the secondary config,\n which we call a _macro_.\n\n Parameters\n ----------\n path : str\n Path to the macro file. If None, it's read from the command line arg '--macro'.\n If that doesn't work, this function does nothing.\n\n Notes\n -----\n The `path` argument can either be a single path or a list of paths delimited by a colon.\n In other words, the following would work:\n $ python experiment.py ... --macro path/to/macro1.yml:path/to/macro2.yml\n\n Returns\n -------\n BaseExperiment\n " if (path is None): path = self.get_arg('macro') if (path is None): return for _path in path.split(':'): with open(_path, 'r') as f: macro = yaml.load(f, Loader=yaml.FullLoader) MacroReader.update_dict(self._config, macro, copy=False) return self<|docstring|>If multiple experiments share the same set of command line args, it can be annoying to key them in manually every time. Macros is a secondary config file that can be shared between experiments. The main config is updated with the contents of the secondary config, which we call a _macro_. Parameters ---------- path : str Path to the macro file. If None, it's read from the command line arg '--macro'. If that doesn't work, this function does nothing. Notes ----- The `path` argument can either be a single path or a list of paths delimited by a colon. In other words, the following would work: $ python experiment.py ... --macro path/to/macro1.yml:path/to/macro2.yml Returns ------- BaseExperiment<|endoftext|>
bed1e25449cb49b4219e45f3048d6e34033f3e9132a9bec29ef2b7ba9a5161fb
def parse_experiment_directory(self):
    """Set the experiment directory from the first positional command line argument."""
    directory = self.get_arg(1)
    if directory is None:
        raise RuntimeError("Can't find experiment directory in command line args.")
    self.experiment_directory = directory
    return self
Read path to experiment directory from command line arguments.
speedrun/core.py
parse_experiment_directory
nasimrahaman/speedrun
76
python
def parse_experiment_directory(self): experiment_directory = self.get_arg(1) if (experiment_directory is None): raise RuntimeError("Can't find experiment directory in command line args.") self.experiment_directory = experiment_directory return self
def parse_experiment_directory(self): experiment_directory = self.get_arg(1) if (experiment_directory is None): raise RuntimeError("Can't find experiment directory in command line args.") self.experiment_directory = experiment_directory return self<|docstring|>Read path to experiment directory from command line arguments.<|endoftext|>
4505853dcbd68a5eaacfd16bfd4b6b268a23669197f1495ed6fbdb0d51c53638
def run(self, *args, **kwargs):
    """
    Run the experiment by dispatching the selected method.

    If '--dispatch method' is given as a command line argument, that method is
    called with the provided `args` and `kwargs`, as in `self.method(*args, **kwargs)`.

    For example, say the BaseExperiment instance `my_experiment` has a method
    called `train`, defined in some `experiment.py` where `my_experiment.run()`
    is called. Invoking `python experiment.py --dispatch train` from the command
    line makes this method call `my_experiment.train()`.

    Any registered pre-dispatch hooks (see the `register_pre_dispatch_hook`
    decorator) are executed before dispatching; `clean_up` runs afterwards
    whether dispatch succeeds or raises.
    """
    try:
        self.execute_pre_dispatch_hooks()
        dispatch_key = self.get_dispatch_key()
        return self.dispatch(dispatch_key, *args, **kwargs)
    finally:
        self.clean_up()
Run the experiment. If '--dispatch method' is given as a command line argument, it's called with the `args` and `kwargs` provided, as in `self.method(*args, **kwargs)`. Say the BaseExperiment instance `my_experiment` has a method called `train`, and it's defined in some `experiment.py` where `my_experiment.run()` is called. Calling `python experiment.py --dispatch train` from the command line will cause this method to call `my_experiment.train()`. In addition, this function will also run any pre-dispatch hooks if registered (via the `register_pre_dispatch_hook` decorator).
speedrun/core.py
run
nasimrahaman/speedrun
76
python
def run(self, *args, **kwargs): "\n Run the experiment. If '--dispatch method' is given as a command line argument, it's\n called with the `args` and `kwargs` provided, as in `self.method(*args, **kwargs)`.\n\n Say the BaseExperiment instance `my_experiment` a method called `train`,\n and it's defined in some `experiment.py` where `my_experiment.run()` is called.\n Calling `python experiment.py --dispatch train` from the command line\n will cause this method to call `my_experiment.train()`.\n\n In addition, this function will also run any pre-dispatch hooks if registered\n (via the `register_pre_dispatch_hook` decorator).\n " try: self.execute_pre_dispatch_hooks() return self.dispatch(self.get_dispatch_key(), *args, **kwargs) finally: self.clean_up()
def run(self, *args, **kwargs): "\n Run the experiment. If '--dispatch method' is given as a command line argument, it's\n called with the `args` and `kwargs` provided, as in `self.method(*args, **kwargs)`.\n\n Say the BaseExperiment instance `my_experiment` a method called `train`,\n and it's defined in some `experiment.py` where `my_experiment.run()` is called.\n Calling `python experiment.py --dispatch train` from the command line\n will cause this method to call `my_experiment.train()`.\n\n In addition, this function will also run any pre-dispatch hooks if registered\n (via the `register_pre_dispatch_hook` decorator).\n " try: self.execute_pre_dispatch_hooks() return self.dispatch(self.get_dispatch_key(), *args, **kwargs) finally: self.clean_up()<|docstring|>Run the experiment. If '--dispatch method' is given as a command line argument, it's called with the `args` and `kwargs` provided, as in `self.method(*args, **kwargs)`. Say the BaseExperiment instance `my_experiment` a method called `train`, and it's defined in some `experiment.py` where `my_experiment.run()` is called. Calling `python experiment.py --dispatch train` from the command line will cause this method to call `my_experiment.train()`. In addition, this function will also run any pre-dispatch hooks if registered (via the `register_pre_dispatch_hook` decorator).<|endoftext|>
cb48dcfbf94d63f6f2eb0aab2bb1eb965414976d564462d7e0586fed4aae276c
def dispatch(self, key, *args, **kwargs):
    """Look up the method named `key` on this instance and call it with the given arguments."""
    assert hasattr(self, key), f"Trying to dispatch method {key}, but it doesn't exist."
    method = getattr(self, key)
    return method(*args, **kwargs)
Dispatches a method given its name as `key`.
speedrun/core.py
dispatch
nasimrahaman/speedrun
76
python
def dispatch(self, key, *args, **kwargs): assert hasattr(self, key), f"Trying to dispatch method {key}, but it doesn't exist." return getattr(self, key)(*args, **kwargs)
def dispatch(self, key, *args, **kwargs): assert hasattr(self, key), f"Trying to dispatch method {key}, but it doesn't exist." return getattr(self, key)(*args, **kwargs)<|docstring|>Dispatches a method given its name as `key`.<|endoftext|>
939e70733f5bee1a574855e862b43ab069aa69acb05e5ce3b626cbf00f22127f
def get_dispatch_key(self):
    """
    Figures out what function to dispatch.
    Looks for it in the commandline args, instance attribute, decorated functions and class attribute,
    in that order; raises a RuntimeError if none of them yields a key.
    """
    # Guard-clause cascade: first source that yields a key wins.
    if self._argv is not None and self.get_arg('dispatch', None) is not None:
        return self.get_arg('dispatch', ensure_exists=True)
    if self.find_default_dispatch() is not None:
        return self.find_default_dispatch()
    if self._default_dispatch is not None:
        return self._default_dispatch
    if self.DEFAULT_DISPATCH is not None:
        return self.DEFAULT_DISPATCH
    raise RuntimeError('No default dispatch could be found. Please set it first.')
Figures out what function to dispatch. Looks for it in the commandline args, instance attribute, decorated functions and class attribute, in that order.
speedrun/core.py
get_dispatch_key
nasimrahaman/speedrun
76
python
def get_dispatch_key(self): '\n Figures out what function to dispatch.\n Looks for it in the commandline args, instance attribute, decorated functions and class attribute,\n in that order.\n ' if ((self._argv is not None) and (self.get_arg('dispatch', None) is not None)): return self.get_arg('dispatch', ensure_exists=True) elif (self.find_default_dispatch() is not None): return self.find_default_dispatch() elif (self._default_dispatch is not None): return self._default_dispatch elif (self.DEFAULT_DISPATCH is not None): return self.DEFAULT_DISPATCH else: raise RuntimeError('No default dispatch could be found. Please set it first.')
def get_dispatch_key(self): '\n Figures out what function to dispatch.\n Looks for it in the commandline args, instance attribute, decorated functions and class attribute,\n in that order.\n ' if ((self._argv is not None) and (self.get_arg('dispatch', None) is not None)): return self.get_arg('dispatch', ensure_exists=True) elif (self.find_default_dispatch() is not None): return self.find_default_dispatch() elif (self._default_dispatch is not None): return self._default_dispatch elif (self.DEFAULT_DISPATCH is not None): return self.DEFAULT_DISPATCH else: raise RuntimeError('No default dispatch could be found. Please set it first.')<|docstring|>Figures out what function to dispatch. Looks for it in the commandline args, instance attribute, decorated functions and class attribute, in that order.<|endoftext|>
fc9a80f87129b3e87b18ddcd815024e48d1d17e5ae6a3ed8573fa916cb9f501b
@staticmethod def register_default_dispatch(fn): '\n Decorator to mark a method to be dispatched by default.\n\n Examples\n --------\n >>> @BaseExperiment.register_default_dispatch\n ... def my_default_method(self, *args):\n ... return ...\n ' setattr(fn, '__is_speedrun_default_dispatch', True) return fn
Decorator to mark a method to be dispatched by default. Examples -------- >>> @BaseExperiment.register_default_dispatch ... def my_default_method(self, *args): ... return ...
speedrun/core.py
register_default_dispatch
nasimrahaman/speedrun
76
python
@staticmethod def register_default_dispatch(fn): '\n Decorator to mark a method to be dispatched by default.\n\n Examples\n --------\n >>> @BaseExperiment.register_default_dispatch\n ... def my_default_method(self, *args):\n ... return ...\n ' setattr(fn, '__is_speedrun_default_dispatch', True) return fn
@staticmethod def register_default_dispatch(fn): '\n Decorator to mark a method to be dispatched by default.\n\n Examples\n --------\n >>> @BaseExperiment.register_default_dispatch\n ... def my_default_method(self, *args):\n ... return ...\n ' setattr(fn, '__is_speedrun_default_dispatch', True) return fn<|docstring|>Decorator to mark a method to be dispatched by default. Examples -------- >>> @BaseExperiment.register_default_dispatch ... def my_default_method(self, *args): ... return ...<|endoftext|>
c70ab769f4287bceaa7b81a52b10d7bbbc50d2b58d49cec57d9b409d612be960
def set_default_dispatch(self, method_name): '\n Set the default dispatch for _this_ instance.\n\n Parameters\n ----------\n method_name : str\n name of the function that will be dispatched by default.\n\n Returns\n -------\n BaseExperiment\n ' assert (method_name in dir(type(self))), f'Method name {method_name} not found in list of attributes.' assert callable(getattr(type(self), method_name)), f'Default dispatch method name {method_name} should be callable.' self._default_dispatch = method_name return self
Set the default dispatch for _this_ instance. Parameters ---------- method_name : str name of the function that will be dispatched by default. Returns ------- BaseExperiment
speedrun/core.py
set_default_dispatch
nasimrahaman/speedrun
76
python
def set_default_dispatch(self, method_name): '\n Set the default dispatch for _this_ instance.\n\n Parameters\n ----------\n method_name : str\n name of the function that will be dispatched by default.\n\n Returns\n -------\n BaseExperiment\n ' assert (method_name in dir(type(self))), f'Method name {method_name} not found in list of attributes.' assert callable(getattr(type(self), method_name)), f'Default dispatch method name {method_name} should be callable.' self._default_dispatch = method_name return self
def set_default_dispatch(self, method_name): '\n Set the default dispatch for _this_ instance.\n\n Parameters\n ----------\n method_name : str\n name of the function that will be dispatched by default.\n\n Returns\n -------\n BaseExperiment\n ' assert (method_name in dir(type(self))), f'Method name {method_name} not found in list of attributes.' assert callable(getattr(type(self), method_name)), f'Default dispatch method name {method_name} should be callable.' self._default_dispatch = method_name return self<|docstring|>Set the default dispatch for _this_ instance. Parameters ---------- method_name : str name of the function that will be dispatched by default. Returns ------- BaseExperiment<|endoftext|>
33795399d0ad399c72a8d07cc665c01729522e1516806b9b629415a75e2ab47f
def get_default_dispatch(self): 'Get the name of the method used as the default dispatch.' return self._default_dispatch
Get the name of the method used as the default dispatch.
speedrun/core.py
get_default_dispatch
nasimrahaman/speedrun
76
python
def get_default_dispatch(self): return self._default_dispatch
def get_default_dispatch(self): return self._default_dispatch<|docstring|>Get the name of the method used as the default dispatch.<|endoftext|>
30feb9d400c58df30ff6cc0ad387b89850f7728341b9b1c713c7c04e05975ab7
def find_default_dispatch(self): 'Find the name of the function marked as default dispatch.' for attry in dir(type(self)): if getattr(getattr(type(self), attry), '__is_speedrun_default_dispatch', False): return attry
Find the name of the function marked as default dispatch.
speedrun/core.py
find_default_dispatch
nasimrahaman/speedrun
76
python
def find_default_dispatch(self): for attry in dir(type(self)): if getattr(getattr(type(self), attry), '__is_speedrun_default_dispatch', False): return attry
def find_default_dispatch(self): for attry in dir(type(self)): if getattr(getattr(type(self), attry), '__is_speedrun_default_dispatch', False): return attry<|docstring|>Find the name of the function marked as default dispatch.<|endoftext|>
178031c90c070bdf2d06e1c2c1755b07e77c21809d3df3398ef2802ed31cfc3f
@staticmethod def register_pre_dispatch_hook(fn): '\n Decorator to mark a method as a pre-dispatch hook. Pre-dispatch hooks are run before the\n function being dispatched is called.\n ' return BaseExperiment.register_hook(fn, 'pre_dispatch')
Decorator to mark a method as a pre-dispatch hook. Pre-dispatch hooks are run before the function being dispatched is called.
speedrun/core.py
register_pre_dispatch_hook
nasimrahaman/speedrun
76
python
@staticmethod def register_pre_dispatch_hook(fn): '\n Decorator to mark a method as a pre-dispatch hook. Pre-dispatch hooks are run before the\n function being dispatched is called.\n ' return BaseExperiment.register_hook(fn, 'pre_dispatch')
@staticmethod def register_pre_dispatch_hook(fn): '\n Decorator to mark a method as a pre-dispatch hook. Pre-dispatch hooks are run before the\n function being dispatched is called.\n ' return BaseExperiment.register_hook(fn, 'pre_dispatch')<|docstring|>Decorator to mark a method as a pre-dispatch hook. Pre-dispatch hooks are run before the function being dispatched is called.<|endoftext|>
0f0f1317166d7d5894228cb206f64da493911fb517c97266bf6628b587b21c82
def execute_pre_dispatch_hooks(self): 'Execute the pre-dispatch hooks, if available. See also: `register_pre_dispatch_hook`.' return self.execute_hooks('pre_dispatch')
Execute the pre-dispatch hooks, if available. See also: `register_pre_dispatch_hook`.
speedrun/core.py
execute_pre_dispatch_hooks
nasimrahaman/speedrun
76
python
def execute_pre_dispatch_hooks(self): return self.execute_hooks('pre_dispatch')
def execute_pre_dispatch_hooks(self): return self.execute_hooks('pre_dispatch')<|docstring|>Execute the pre-dispatch hooks, if available. See also: `register_pre_dispatch_hook`.<|endoftext|>
dac2422874163336d5fb4f6cd7b3a1bf11989df20659859b3fa5300d51c6d4cc
def clean_up(self): '\n Overridable method to clean up the mess before exiting the process. This method is\n *guaranteed* to be called if `BaseExperiment.run` is used to dispatch (even if the dispatch\n is not successful).\n ' pass
Overridable method to clean up the mess before exiting the process. This method is *guaranteed* to be called if `BaseExperiment.run` is used to dispatch (even if the dispatch is not successful).
speedrun/core.py
clean_up
nasimrahaman/speedrun
76
python
def clean_up(self): '\n Overridable method to clean up the mess before exiting the process. This method is\n *guaranteed* to be called if `BaseExperiment.run` is used to dispatch (even if the dispatch\n is not successful).\n ' pass
def clean_up(self): '\n Overridable method to clean up the mess before exiting the process. This method is\n *guaranteed* to be called if `BaseExperiment.run` is used to dispatch (even if the dispatch\n is not successful).\n ' pass<|docstring|>Overridable method to clean up the mess before exiting the process. This method is *guaranteed* to be called if `BaseExperiment.run` is used to dispatch (even if the dispatch is not successful).<|endoftext|>
980674502e54149d6648b5103f60e694d8d86298e26f00f1c7abf0eb173659a5
def update_git_revision(self, overwrite=False): "\n Updates the configuration with a 'git_rev' field with the current HEAD revision.\n\n Parameters\n ----------\n overwrite : bool\n If a 'git_rev' field already exists, Whether to overwrite it.\n\n Returns\n -------\n BaseExperiment\n " try: gitcmd = ['git', 'rev-parse', '--verify', 'HEAD'] gitrev = subprocess.check_output(gitcmd).decode('latin1').strip() except subprocess.CalledProcessError: gitrev = 'none' if ((not overwrite) and (self.get('git_rev', None) is not None)): pass else: self.set('git_rev', gitrev) return self
Updates the configuration with a 'git_rev' field with the current HEAD revision. Parameters ---------- overwrite : bool If a 'git_rev' field already exists, Whether to overwrite it. Returns ------- BaseExperiment
speedrun/core.py
update_git_revision
nasimrahaman/speedrun
76
python
def update_git_revision(self, overwrite=False): "\n Updates the configuration with a 'git_rev' field with the current HEAD revision.\n\n Parameters\n ----------\n overwrite : bool\n If a 'git_rev' field already exists, Whether to overwrite it.\n\n Returns\n -------\n BaseExperiment\n " try: gitcmd = ['git', 'rev-parse', '--verify', 'HEAD'] gitrev = subprocess.check_output(gitcmd).decode('latin1').strip() except subprocess.CalledProcessError: gitrev = 'none' if ((not overwrite) and (self.get('git_rev', None) is not None)): pass else: self.set('git_rev', gitrev) return self
def update_git_revision(self, overwrite=False): "\n Updates the configuration with a 'git_rev' field with the current HEAD revision.\n\n Parameters\n ----------\n overwrite : bool\n If a 'git_rev' field already exists, Whether to overwrite it.\n\n Returns\n -------\n BaseExperiment\n " try: gitcmd = ['git', 'rev-parse', '--verify', 'HEAD'] gitrev = subprocess.check_output(gitcmd).decode('latin1').strip() except subprocess.CalledProcessError: gitrev = 'none' if ((not overwrite) and (self.get('git_rev', None) is not None)): pass else: self.set('git_rev', gitrev) return self<|docstring|>Updates the configuration with a 'git_rev' field with the current HEAD revision. Parameters ---------- overwrite : bool If a 'git_rev' field already exists, Whether to overwrite it. Returns ------- BaseExperiment<|endoftext|>
741387a60953e1800197c7d65d08255a4e0e0abcb58a9802335c1b42fb488bc6
def auto_setup(self, update_git_revision=True, dump_configuration=True): "\n Set things up automagically.\n\n Parameters\n ----------\n update_git_revision : bool\n Whether to update current configuration with the git revision hash.\n dump_configuration : bool\n Whether to update the configuration in file.\n\n Examples\n --------\n In python file experiment.py:\n >>> experiment = BaseExperiment().auto_setup()\n Let's say the experiment uses the following tags from the config file:\n >>> experiment.get('optimizer/name')\n >>> experiment.get('hyperparameters/lambda')\n\n As command line arguments, if you pass:\n $ python experiment.py /path/to/experiment/directory\n --inherit /path/to/previous/experiment/directory\n --config.optimizer.name RMSprop\n --config.hyperparameters.lambda 0.001\n ... the following happens.\n 1. The configuration file loaded from\n `/path/to/previous/experiment/directory/Configurations/train_config.yml`\n 2. The fields 'optimizer/name' and 'hyperparameters/lambda' are overwritten with the\n provided values ('RMSprop' and 0.001 respectively.)\n 3. The resulting new configuration is dumped to\n `/path/to/experiment/directory/Configurations/train_config.yml`.\n\n Returns\n -------\n BaseExperiment\n " self.record_args() if self.get_arg('purge', False): self.purge_existing_experiment_directory() self.parse_experiment_directory() inherit_from = self.get_arg('inherit') if (inherit_from is not None): self.inherit_configuration(inherit_from, read=False) try: self.read_config_file() except FileNotFoundError: pass self.read_macro() self.update_configuration_from_args() if update_git_revision: self.update_git_revision() if dump_configuration: self.dump_configuration() return self
Set things up automagically. Parameters ---------- update_git_revision : bool Whether to update current configuration with the git revision hash. dump_configuration : bool Whether to update the configuration in file. Examples -------- In python file experiment.py: >>> experiment = BaseExperiment().auto_setup() Let's say the experiment uses the following tags from the config file: >>> experiment.get('optimizer/name') >>> experiment.get('hyperparameters/lambda') As command line arguments, if you pass: $ python experiment.py /path/to/experiment/directory --inherit /path/to/previous/experiment/directory --config.optimizer.name RMSprop --config.hyperparameters.lambda 0.001 ... the following happens. 1. The configuration file loaded from `/path/to/previous/experiment/directory/Configurations/train_config.yml` 2. The fields 'optimizer/name' and 'hyperparameters/lambda' are overwritten with the provided values ('RMSprop' and 0.001 respectively.) 3. The resulting new configuration is dumped to `/path/to/experiment/directory/Configurations/train_config.yml`. Returns ------- BaseExperiment
speedrun/core.py
auto_setup
nasimrahaman/speedrun
76
python
def auto_setup(self, update_git_revision=True, dump_configuration=True): "\n Set things up automagically.\n\n Parameters\n ----------\n update_git_revision : bool\n Whether to update current configuration with the git revision hash.\n dump_configuration : bool\n Whether to update the configuration in file.\n\n Examples\n --------\n In python file experiment.py:\n >>> experiment = BaseExperiment().auto_setup()\n Let's say the experiment uses the following tags from the config file:\n >>> experiment.get('optimizer/name')\n >>> experiment.get('hyperparameters/lambda')\n\n As command line arguments, if you pass:\n $ python experiment.py /path/to/experiment/directory\n --inherit /path/to/previous/experiment/directory\n --config.optimizer.name RMSprop\n --config.hyperparameters.lambda 0.001\n ... the following happens.\n 1. The configuration file loaded from\n `/path/to/previous/experiment/directory/Configurations/train_config.yml`\n 2. The fields 'optimizer/name' and 'hyperparameters/lambda' are overwritten with the\n provided values ('RMSprop' and 0.001 respectively.)\n 3. The resulting new configuration is dumped to\n `/path/to/experiment/directory/Configurations/train_config.yml`.\n\n Returns\n -------\n BaseExperiment\n " self.record_args() if self.get_arg('purge', False): self.purge_existing_experiment_directory() self.parse_experiment_directory() inherit_from = self.get_arg('inherit') if (inherit_from is not None): self.inherit_configuration(inherit_from, read=False) try: self.read_config_file() except FileNotFoundError: pass self.read_macro() self.update_configuration_from_args() if update_git_revision: self.update_git_revision() if dump_configuration: self.dump_configuration() return self
def auto_setup(self, update_git_revision=True, dump_configuration=True): "\n Set things up automagically.\n\n Parameters\n ----------\n update_git_revision : bool\n Whether to update current configuration with the git revision hash.\n dump_configuration : bool\n Whether to update the configuration in file.\n\n Examples\n --------\n In python file experiment.py:\n >>> experiment = BaseExperiment().auto_setup()\n Let's say the experiment uses the following tags from the config file:\n >>> experiment.get('optimizer/name')\n >>> experiment.get('hyperparameters/lambda')\n\n As command line arguments, if you pass:\n $ python experiment.py /path/to/experiment/directory\n --inherit /path/to/previous/experiment/directory\n --config.optimizer.name RMSprop\n --config.hyperparameters.lambda 0.001\n ... the following happens.\n 1. The configuration file loaded from\n `/path/to/previous/experiment/directory/Configurations/train_config.yml`\n 2. The fields 'optimizer/name' and 'hyperparameters/lambda' are overwritten with the\n provided values ('RMSprop' and 0.001 respectively.)\n 3. The resulting new configuration is dumped to\n `/path/to/experiment/directory/Configurations/train_config.yml`.\n\n Returns\n -------\n BaseExperiment\n " self.record_args() if self.get_arg('purge', False): self.purge_existing_experiment_directory() self.parse_experiment_directory() inherit_from = self.get_arg('inherit') if (inherit_from is not None): self.inherit_configuration(inherit_from, read=False) try: self.read_config_file() except FileNotFoundError: pass self.read_macro() self.update_configuration_from_args() if update_git_revision: self.update_git_revision() if dump_configuration: self.dump_configuration() return self<|docstring|>Set things up automagically. Parameters ---------- update_git_revision : bool Whether to update current configuration with the git revision hash. dump_configuration : bool Whether to update the configuration in file. 
Examples -------- In python file experiment.py: >>> experiment = BaseExperiment().auto_setup() Let's say the experiment uses the following tags from the config file: >>> experiment.get('optimizer/name') >>> experiment.get('hyperparameters/lambda') As command line arguments, if you pass: $ python experiment.py /path/to/experiment/directory --inherit /path/to/previous/experiment/directory --config.optimizer.name RMSprop --config.hyperparameters.lambda 0.001 ... the following happens. 1. The configuration file loaded from `/path/to/previous/experiment/directory/Configurations/train_config.yml` 2. The fields 'optimizer/name' and 'hyperparameters/lambda' are overwritten with the provided values ('RMSprop' and 0.001 respectively.) 3. The resulting new configuration is dumped to `/path/to/experiment/directory/Configurations/train_config.yml`. Returns ------- BaseExperiment<|endoftext|>
2b46debf09380968e1199c4bf8f2c23a4b25c45fd2c8155b9dcb15e1e6d0e25d
def _process_bibtex(file, expected_count=1) -> 'typing.List[EditableFM]': '\n Parse a BibTeX .bib file and return the parsed metadata\n :param file: The .bib file to parse\n :param expected_count: The expected number of entries inside the .bib\n :return: The parsed metadata as a list of EditableFM\n ' parser = BibTexParser(common_strings=True) parser.customization = import_bibtex.convert_to_unicode parser.ignore_nonstandard_types = False with Path(bibtex_dir, file).open('r', encoding='utf-8') as bibtex_file: bib_database = bibtexparser.load(bibtex_file, parser=parser) results = [] for entry in bib_database.entries: results.append(import_bibtex.parse_bibtex_entry(entry, dry_run=True)) assert (len(results) == expected_count) return results
Parse a BibTeX .bib file and return the parsed metadata :param file: The .bib file to parse :param expected_count: The expected number of entries inside the .bib :return: The parsed metadata as a list of EditableFM
tests/test_bibtex_import.py
_process_bibtex
masasomiya/publication-importer
122
python
def _process_bibtex(file, expected_count=1) -> 'typing.List[EditableFM]': '\n Parse a BibTeX .bib file and return the parsed metadata\n :param file: The .bib file to parse\n :param expected_count: The expected number of entries inside the .bib\n :return: The parsed metadata as a list of EditableFM\n ' parser = BibTexParser(common_strings=True) parser.customization = import_bibtex.convert_to_unicode parser.ignore_nonstandard_types = False with Path(bibtex_dir, file).open('r', encoding='utf-8') as bibtex_file: bib_database = bibtexparser.load(bibtex_file, parser=parser) results = [] for entry in bib_database.entries: results.append(import_bibtex.parse_bibtex_entry(entry, dry_run=True)) assert (len(results) == expected_count) return results
def _process_bibtex(file, expected_count=1) -> 'typing.List[EditableFM]': '\n Parse a BibTeX .bib file and return the parsed metadata\n :param file: The .bib file to parse\n :param expected_count: The expected number of entries inside the .bib\n :return: The parsed metadata as a list of EditableFM\n ' parser = BibTexParser(common_strings=True) parser.customization = import_bibtex.convert_to_unicode parser.ignore_nonstandard_types = False with Path(bibtex_dir, file).open('r', encoding='utf-8') as bibtex_file: bib_database = bibtexparser.load(bibtex_file, parser=parser) results = [] for entry in bib_database.entries: results.append(import_bibtex.parse_bibtex_entry(entry, dry_run=True)) assert (len(results) == expected_count) return results<|docstring|>Parse a BibTeX .bib file and return the parsed metadata :param file: The .bib file to parse :param expected_count: The expected number of entries inside the .bib :return: The parsed metadata as a list of EditableFM<|endoftext|>
f1b87abc659dedfa6f2e315c7e68d0186d6cb8b7536b76abb0b6939761064d7c
def _test_publication_type(metadata: EditableFM, expected_type: import_bibtex.PublicationType): '\n Check that the publication_types field of the parsed metadata is set to the expected type.\n ' assert (metadata.fm['publication_types'] == [str(expected_type.value)])
Check that the publication_types field of the parsed metadata is set to the expected type.
tests/test_bibtex_import.py
_test_publication_type
masasomiya/publication-importer
122
python
def _test_publication_type(metadata: EditableFM, expected_type: import_bibtex.PublicationType): '\n \n ' assert (metadata.fm['publication_types'] == [str(expected_type.value)])
def _test_publication_type(metadata: EditableFM, expected_type: import_bibtex.PublicationType): '\n \n ' assert (metadata.fm['publication_types'] == [str(expected_type.value)])<|docstring|>Check that the publication_types field of the parsed metadata is set to the expected type.<|endoftext|>
f1bf76c92b36edbb53bc0765cf8500eee8cfe5d72e7768834835d40c353f8976
def test_bibtex_types(): '\n This test uses the import_bibtex functions to parse a .bib file and checks that the\n resulting metadata has the correct publication type set.\n ' _test_publication_type(_process_bibtex('article.bib')[0], import_bibtex.PublicationType.JournalArticle) for metadata in _process_bibtex('report.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Report) for metadata in _process_bibtex('thesis.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Thesis) _test_publication_type(_process_bibtex('book.bib')[0], import_bibtex.PublicationType.Book)
This test uses the import_bibtex functions to parse a .bib file and checks that the resulting metadata has the correct publication type set.
tests/test_bibtex_import.py
test_bibtex_types
masasomiya/publication-importer
122
python
def test_bibtex_types(): '\n This test uses the import_bibtex functions to parse a .bib file and checks that the\n resulting metadata has the correct publication type set.\n ' _test_publication_type(_process_bibtex('article.bib')[0], import_bibtex.PublicationType.JournalArticle) for metadata in _process_bibtex('report.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Report) for metadata in _process_bibtex('thesis.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Thesis) _test_publication_type(_process_bibtex('book.bib')[0], import_bibtex.PublicationType.Book)
def test_bibtex_types(): '\n This test uses the import_bibtex functions to parse a .bib file and checks that the\n resulting metadata has the correct publication type set.\n ' _test_publication_type(_process_bibtex('article.bib')[0], import_bibtex.PublicationType.JournalArticle) for metadata in _process_bibtex('report.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Report) for metadata in _process_bibtex('thesis.bib', expected_count=3): _test_publication_type(metadata, import_bibtex.PublicationType.Thesis) _test_publication_type(_process_bibtex('book.bib')[0], import_bibtex.PublicationType.Book)<|docstring|>This test uses the import_bibtex functions to parse a .bib file and checks that the resulting metadata has the correct publication type set.<|endoftext|>
93742b6aff33e8af7e189413da91b9ee2126de652227cfcb9d52e18af7e782de
def _ensure_api_reqs(): '\n Checks to ensure that all 3rd party libs required by this library\n are met. \n ' global _LAZYAPI_CHECKED if _LAZYAPI_CHECKED: return global fastapi, starlette, httpx, dateparser, pytz global _Request, HttpRequest, HttpResponse, FastAPI if (fastapi is None): fastapi = Lib.import_lib('fastapi', 'fastapi[all]') Lib.reload_module(fastapi) FastAPI = fastapi.FastAPI if (starlette is None): starlette = Lib.import_lib('starlette') Lib.reload_module(starlette) _Request = starlette.requests.Request if (httpx is None): httpx = Lib.import_lib('httpx') Lib.reload_module(httpx) HttpResponse = httpx.Response HttpRequest = httpx.Request if (dateparser is None): dateparser = Lib.import_lib('httpx') Lib.reload_module(dateparser) if (pytz is None): pytz = Lib.import_lib('pytz') Lib.reload_module(pytz) _LAZYAPI_CHECKED = True
Checks to ensure that all 3rd party libs required by this library are met.
lazy/api/base_imports.py
_ensure_api_reqs
trisongz/lazycls
2
python
def _ensure_api_reqs(): '\n Checks to ensure that all 3rd party libs required by this library\n are met. \n ' global _LAZYAPI_CHECKED if _LAZYAPI_CHECKED: return global fastapi, starlette, httpx, dateparser, pytz global _Request, HttpRequest, HttpResponse, FastAPI if (fastapi is None): fastapi = Lib.import_lib('fastapi', 'fastapi[all]') Lib.reload_module(fastapi) FastAPI = fastapi.FastAPI if (starlette is None): starlette = Lib.import_lib('starlette') Lib.reload_module(starlette) _Request = starlette.requests.Request if (httpx is None): httpx = Lib.import_lib('httpx') Lib.reload_module(httpx) HttpResponse = httpx.Response HttpRequest = httpx.Request if (dateparser is None): dateparser = Lib.import_lib('httpx') Lib.reload_module(dateparser) if (pytz is None): pytz = Lib.import_lib('pytz') Lib.reload_module(pytz) _LAZYAPI_CHECKED = True
def _ensure_api_reqs(): '\n Checks to ensure that all 3rd party libs required by this library\n are met. \n ' global _LAZYAPI_CHECKED if _LAZYAPI_CHECKED: return global fastapi, starlette, httpx, dateparser, pytz global _Request, HttpRequest, HttpResponse, FastAPI if (fastapi is None): fastapi = Lib.import_lib('fastapi', 'fastapi[all]') Lib.reload_module(fastapi) FastAPI = fastapi.FastAPI if (starlette is None): starlette = Lib.import_lib('starlette') Lib.reload_module(starlette) _Request = starlette.requests.Request if (httpx is None): httpx = Lib.import_lib('httpx') Lib.reload_module(httpx) HttpResponse = httpx.Response HttpRequest = httpx.Request if (dateparser is None): dateparser = Lib.import_lib('httpx') Lib.reload_module(dateparser) if (pytz is None): pytz = Lib.import_lib('pytz') Lib.reload_module(pytz) _LAZYAPI_CHECKED = True<|docstring|>Checks to ensure that all 3rd party libs required by this library are met.<|endoftext|>
aed109c9568ce0eba06cc483551476f5d08384a861fcda6b1cbf87852a365aa0
def verify_postgresql_version(apps, schema_editor): '\n Verify that PostgreSQL is version 9.4 or higher.\n ' DB_MINIMUM_VERSION = 90400 try: pg_version = connection.pg_version if (pg_version < DB_MINIMUM_VERSION): raise Exception('PostgreSQL 9.4.0 ({}) or higher is required ({} found). Upgrade PostgreSQL and then run migrations again.'.format(DB_MINIMUM_VERSION, pg_version)) except OperationalError: pass
Verify that PostgreSQL is version 9.4 or higher.
netbox/extras/migrations/0001_initial_squashed_0010_customfield_filter_logic.py
verify_postgresql_version
ananace/netbox
6
python
def verify_postgresql_version(apps, schema_editor): '\n \n ' DB_MINIMUM_VERSION = 90400 try: pg_version = connection.pg_version if (pg_version < DB_MINIMUM_VERSION): raise Exception('PostgreSQL 9.4.0 ({}) or higher is required ({} found). Upgrade PostgreSQL and then run migrations again.'.format(DB_MINIMUM_VERSION, pg_version)) except OperationalError: pass
def verify_postgresql_version(apps, schema_editor): '\n \n ' DB_MINIMUM_VERSION = 90400 try: pg_version = connection.pg_version if (pg_version < DB_MINIMUM_VERSION): raise Exception('PostgreSQL 9.4.0 ({}) or higher is required ({} found). Upgrade PostgreSQL and then run migrations again.'.format(DB_MINIMUM_VERSION, pg_version)) except OperationalError: pass<|docstring|>Verify that PostgreSQL is version 9.4 or higher.<|endoftext|>
f117eedd2cf2da083febd476cb35d89aaf5e078407e8725156e27888a3bc1b1f
def test_CSIRO_wire_break(): '\n Spot-check the nominal behavior of the CSIRO wire break test.\n ' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4001)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32.001], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) print(qc) print(truth) assert numpy.array_equal(qc, truth), 'flagged marginally cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged marginally hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([0, 0, (- 100)], [10, 20, 30], probe_type=1) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged non-xbt profile' p = util.testingProfile.fakeProfile([0, 32.01, 31.99], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), "flagged hot temperature that wasn't at bottom of profile" p = util.testingProfile.fakeProfile([9, 9, 10], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.ones(3, dtype=bool) truth[0] = False assert numpy.array_equal(qc, truth), 'should flag both sides of a gap'
Spot-check the nominal behavior of the CSIRO wire break test.
tests/CSIRO_wire_break_validation.py
test_CSIRO_wire_break
BillMills/AutoQC
17
python
def test_CSIRO_wire_break(): '\n \n ' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4001)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32.001], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) print(qc) print(truth) assert numpy.array_equal(qc, truth), 'flagged marginally cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged marginally hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([0, 0, (- 100)], [10, 20, 30], probe_type=1) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged non-xbt profile' p = util.testingProfile.fakeProfile([0, 32.01, 31.99], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), "flagged hot temperature that wasn't at bottom of profile" p = util.testingProfile.fakeProfile([9, 9, 10], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.ones(3, dtype=bool) truth[0] = False assert numpy.array_equal(qc, truth), 'should flag both sides of a gap'
def test_CSIRO_wire_break(): '\n \n ' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4001)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32.001], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) truth[2] = True assert numpy.array_equal(qc, truth), 'failed to flag too-hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([(- 2.399), (- 2.399), (- 2.4)], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) print(qc) print(truth) assert numpy.array_equal(qc, truth), 'flagged marginally cold temperature at bottom of profile' p = util.testingProfile.fakeProfile([31.99, 31.99, 32], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged marginally hot temperature at bottom of profile' p = util.testingProfile.fakeProfile([0, 0, (- 100)], [10, 20, 30], probe_type=1) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), 'flagged non-xbt profile' p = util.testingProfile.fakeProfile([0, 32.01, 31.99], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.zeros(3, dtype=bool) assert numpy.array_equal(qc, truth), "flagged hot temperature that wasn't at bottom of profile" p = util.testingProfile.fakeProfile([9, 9, 10], [10, 20, 30], probe_type=2) qc = qctests.CSIRO_wire_break.test(p, None) truth = numpy.ones(3, dtype=bool) truth[0] = False assert numpy.array_equal(qc, truth), 'should flag both sides of a gap'<|docstring|>Spot-check the nominal behavior of the CSIRO wire break test.<|endoftext|>
5ce93d8dd80bcb8d9f59ba8f02941aaa4888a9f9f57d9365bf0a64539a57e55c
def _generate_output_uri_placeholder(port_name: str) -> str: 'Generates the URI placeholder for an output.' return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(port_name)
Generates the URI placeholder for an output.
sdk/python/kfp/dsl/_component_bridge.py
_generate_output_uri_placeholder
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_output_uri_placeholder(port_name: str) -> str: return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(port_name)
def _generate_output_uri_placeholder(port_name: str) -> str: return "{{{{$.outputs.artifacts['{}'].uri}}}}".format(port_name)<|docstring|>Generates the URI placeholder for an output.<|endoftext|>
3517b91f2595a1afebda42c5186d30aa4a124a985c823fe83e34d209f78967bf
def _generate_input_uri_placeholder(port_name: str) -> str: 'Generates the URI placeholder for an input.' return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(port_name)
Generates the URI placeholder for an input.
sdk/python/kfp/dsl/_component_bridge.py
_generate_input_uri_placeholder
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_input_uri_placeholder(port_name: str) -> str: return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(port_name)
def _generate_input_uri_placeholder(port_name: str) -> str: return "{{{{$.inputs.artifacts['{}'].uri}}}}".format(port_name)<|docstring|>Generates the URI placeholder for an input.<|endoftext|>
0322ebc0e34983ee42c7a1b075fcf91e816a812ccb80c8a860c787957f7ec8d6
def _generate_output_metadata_path() -> str: 'Generates the URI to write the output metadata JSON file.' return OUTPUT_METADATA_JSON
Generates the URI to write the output metadata JSON file.
sdk/python/kfp/dsl/_component_bridge.py
_generate_output_metadata_path
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_output_metadata_path() -> str: return OUTPUT_METADATA_JSON
def _generate_output_metadata_path() -> str: return OUTPUT_METADATA_JSON<|docstring|>Generates the URI to write the output metadata JSON file.<|endoftext|>
965cf1755e22b1c7ae64fcf27cc913da11253bb24f0ffe3448cfe04b3d71d06a
def _generate_input_metadata_path(port_name: str) -> str: 'Generates the placeholder for input artifact metadata file.' return str(pathlib.PurePosixPath(OUTPUT_DIR_PLACEHOLDER, RUN_ID_PLACEHOLDER, '{{{{inputs.parameters.{input}}}}}'.format(input=PRODUCER_POD_NAME_PARAMETER.format(port_name)), OUTPUT_METADATA_JSON))
Generates the placeholder for input artifact metadata file.
sdk/python/kfp/dsl/_component_bridge.py
_generate_input_metadata_path
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_input_metadata_path(port_name: str) -> str: return str(pathlib.PurePosixPath(OUTPUT_DIR_PLACEHOLDER, RUN_ID_PLACEHOLDER, '{{{{inputs.parameters.{input}}}}}'.format(input=PRODUCER_POD_NAME_PARAMETER.format(port_name)), OUTPUT_METADATA_JSON))
def _generate_input_metadata_path(port_name: str) -> str: return str(pathlib.PurePosixPath(OUTPUT_DIR_PLACEHOLDER, RUN_ID_PLACEHOLDER, '{{{{inputs.parameters.{input}}}}}'.format(input=PRODUCER_POD_NAME_PARAMETER.format(port_name)), OUTPUT_METADATA_JSON))<|docstring|>Generates the placeholder for input artifact metadata file.<|endoftext|>
1e792a8ab5b5d5196a5e55c7ee376b85e1479ea08efc08aef66959df836508bf
def _generate_input_output_name(port_name: str) -> str: "Generates the placeholder for input artifact's output name." return INPUT_OUTPUT_NAME_PATTERN.format(port_name)
Generates the placeholder for input artifact's output name.
sdk/python/kfp/dsl/_component_bridge.py
_generate_input_output_name
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_input_output_name(port_name: str) -> str: return INPUT_OUTPUT_NAME_PATTERN.format(port_name)
def _generate_input_output_name(port_name: str) -> str: return INPUT_OUTPUT_NAME_PATTERN.format(port_name)<|docstring|>Generates the placeholder for input artifact's output name.<|endoftext|>
cf3518a78e610fc81e36c19be75f8076855be4c60e677dd30203c0e2ad7bd74d
def _generate_executor_input() -> str: 'Generates the placeholder for serialized executor input.' return _EXECUTOR_INPUT_PLACEHOLDER
Generates the placeholder for serialized executor input.
sdk/python/kfp/dsl/_component_bridge.py
_generate_executor_input
aurora-opensource/kubeflow-pipelines
2,860
python
def _generate_executor_input() -> str: return _EXECUTOR_INPUT_PLACEHOLDER
def _generate_executor_input() -> str: return _EXECUTOR_INPUT_PLACEHOLDER<|docstring|>Generates the placeholder for serialized executor input.<|endoftext|>
3680f5c1914b868869c0f4d9f312a9d3291b569d790c9cb28a2eb7b803e29573
def _create_container_op_from_component_and_arguments(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)], component_ref: Optional[_structures.ComponentReference]=None) -> _container_op.ContainerOp: 'Instantiates ContainerOp object.\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n component_ref: (only for v1) The component references.\n\n Returns:\n A ContainerOp instance.\n ' arguments = arguments.copy() for input_spec in (component_spec.inputs or []): if ((input_spec.name not in arguments) and (input_spec.default is not None)): default_value = input_spec.default if (input_spec.type == 'Integer'): default_value = int(default_value) elif (input_spec.type == 'Float'): default_value = float(default_value) elif (type_utils.is_parameter_type(input_spec.type) and kfp.COMPILING_FOR_V2): parameter_type = type_utils.get_parameter_type(input_spec.type) default_value = type_utils.deserialize_parameter_value(value=default_value, parameter_type=parameter_type) arguments[input_spec.name] = default_value original_arguments = arguments arguments = arguments.copy() for (input_name, argument_value) in arguments.items(): if isinstance(argument_value, _pipeline_param.PipelineParam): input_type = component_spec._inputs_dict[input_name].type argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) arguments[input_name] = str(argument_value) if isinstance(argument_value, _container_op.ContainerOp): raise TypeError('ContainerOp object was passed to component as an input argument. 
Pass a single output instead.') placeholder_resolver = ExtraPlaceholderResolver() resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, placeholder_resolver=placeholder_resolver.resolve_placeholder) container_spec = component_spec.implementation.container old_warn_value = _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = True output_paths = collections.OrderedDict((resolved_cmd.output_paths or {})) output_paths.update(placeholder_resolver.output_paths) input_paths = collections.OrderedDict((resolved_cmd.input_paths or {})) input_paths.update(placeholder_resolver.input_paths) artifact_argument_paths = [dsl.InputArgumentPath(argument=arguments[input_name], input=input_name, path=path) for (input_name, path) in input_paths.items()] task = _container_op.ContainerOp(name=(component_spec.name or _components._default_component_name), image=container_spec.image, command=resolved_cmd.command, arguments=resolved_cmd.args, file_outputs=output_paths, artifact_argument_paths=artifact_argument_paths) _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = old_warn_value component_meta = copy.copy(component_spec) task._set_metadata(component_meta, original_arguments) if component_ref: component_ref_without_spec = copy.copy(component_ref) component_ref_without_spec.spec = None task._component_ref = component_ref_without_spec task._parameter_arguments = resolved_cmd.inputs_consumed_by_value name_to_spec_type = {} if component_meta.inputs: name_to_spec_type = {input.name: {'type': input.type, 'default': input.default} for input in component_meta.inputs} if kfp.COMPILING_FOR_V2: for (name, spec_type) in name_to_spec_type.items(): if ((name in original_arguments) and type_utils.is_parameter_type(spec_type['type'])): if isinstance(original_arguments[name], (list, dict)): task._parameter_arguments[name] = json.dumps(original_arguments[name]) else: 
task._parameter_arguments[name] = str(original_arguments[name]) for name in list(task.artifact_arguments.keys()): if (name in task._parameter_arguments): del task.artifact_arguments[name] for name in list(task.input_artifact_paths.keys()): if (name in task._parameter_arguments): del task.input_artifact_paths[name] output_names = [output_spec.name for output_spec in (component_spec.outputs or [])] output_name_to_python = _naming.generate_unique_name_conversion_table(output_names, _naming._sanitize_python_function_name) for output_name in output_names: pythonic_output_name = output_name_to_python[output_name] if ((pythonic_output_name not in task.outputs) and (output_name in task.outputs)): task.outputs[pythonic_output_name] = task.outputs[output_name] if container_spec.env: from kubernetes import client as k8s_client for (name, value) in container_spec.env.items(): task.container.add_env_variable(k8s_client.V1EnvVar(name=name, value=value)) if component_spec.metadata: annotations = (component_spec.metadata.annotations or {}) for (key, value) in annotations.items(): task.add_pod_annotation(key, value) for (key, value) in (component_spec.metadata.labels or {}).items(): task.add_pod_label(key, value) if (annotations.get('volatile_component', 'false') == 'true'): task.execution_options.caching_strategy.max_cache_staleness = 'P0D' _attach_v2_specs(task, component_spec, original_arguments) return task
Instantiates ContainerOp object. Args: component_spec: The component spec object. arguments: The dictionary of component arguments. component_ref: (only for v1) The component references. Returns: A ContainerOp instance.
sdk/python/kfp/dsl/_component_bridge.py
_create_container_op_from_component_and_arguments
aurora-opensource/kubeflow-pipelines
2,860
python
def _create_container_op_from_component_and_arguments(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)], component_ref: Optional[_structures.ComponentReference]=None) -> _container_op.ContainerOp: 'Instantiates ContainerOp object.\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n component_ref: (only for v1) The component references.\n\n Returns:\n A ContainerOp instance.\n ' arguments = arguments.copy() for input_spec in (component_spec.inputs or []): if ((input_spec.name not in arguments) and (input_spec.default is not None)): default_value = input_spec.default if (input_spec.type == 'Integer'): default_value = int(default_value) elif (input_spec.type == 'Float'): default_value = float(default_value) elif (type_utils.is_parameter_type(input_spec.type) and kfp.COMPILING_FOR_V2): parameter_type = type_utils.get_parameter_type(input_spec.type) default_value = type_utils.deserialize_parameter_value(value=default_value, parameter_type=parameter_type) arguments[input_spec.name] = default_value original_arguments = arguments arguments = arguments.copy() for (input_name, argument_value) in arguments.items(): if isinstance(argument_value, _pipeline_param.PipelineParam): input_type = component_spec._inputs_dict[input_name].type argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) arguments[input_name] = str(argument_value) if isinstance(argument_value, _container_op.ContainerOp): raise TypeError('ContainerOp object was passed to component as an input argument. 
Pass a single output instead.') placeholder_resolver = ExtraPlaceholderResolver() resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, placeholder_resolver=placeholder_resolver.resolve_placeholder) container_spec = component_spec.implementation.container old_warn_value = _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = True output_paths = collections.OrderedDict((resolved_cmd.output_paths or {})) output_paths.update(placeholder_resolver.output_paths) input_paths = collections.OrderedDict((resolved_cmd.input_paths or {})) input_paths.update(placeholder_resolver.input_paths) artifact_argument_paths = [dsl.InputArgumentPath(argument=arguments[input_name], input=input_name, path=path) for (input_name, path) in input_paths.items()] task = _container_op.ContainerOp(name=(component_spec.name or _components._default_component_name), image=container_spec.image, command=resolved_cmd.command, arguments=resolved_cmd.args, file_outputs=output_paths, artifact_argument_paths=artifact_argument_paths) _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = old_warn_value component_meta = copy.copy(component_spec) task._set_metadata(component_meta, original_arguments) if component_ref: component_ref_without_spec = copy.copy(component_ref) component_ref_without_spec.spec = None task._component_ref = component_ref_without_spec task._parameter_arguments = resolved_cmd.inputs_consumed_by_value name_to_spec_type = {} if component_meta.inputs: name_to_spec_type = {input.name: {'type': input.type, 'default': input.default} for input in component_meta.inputs} if kfp.COMPILING_FOR_V2: for (name, spec_type) in name_to_spec_type.items(): if ((name in original_arguments) and type_utils.is_parameter_type(spec_type['type'])): if isinstance(original_arguments[name], (list, dict)): task._parameter_arguments[name] = json.dumps(original_arguments[name]) else: 
task._parameter_arguments[name] = str(original_arguments[name]) for name in list(task.artifact_arguments.keys()): if (name in task._parameter_arguments): del task.artifact_arguments[name] for name in list(task.input_artifact_paths.keys()): if (name in task._parameter_arguments): del task.input_artifact_paths[name] output_names = [output_spec.name for output_spec in (component_spec.outputs or [])] output_name_to_python = _naming.generate_unique_name_conversion_table(output_names, _naming._sanitize_python_function_name) for output_name in output_names: pythonic_output_name = output_name_to_python[output_name] if ((pythonic_output_name not in task.outputs) and (output_name in task.outputs)): task.outputs[pythonic_output_name] = task.outputs[output_name] if container_spec.env: from kubernetes import client as k8s_client for (name, value) in container_spec.env.items(): task.container.add_env_variable(k8s_client.V1EnvVar(name=name, value=value)) if component_spec.metadata: annotations = (component_spec.metadata.annotations or {}) for (key, value) in annotations.items(): task.add_pod_annotation(key, value) for (key, value) in (component_spec.metadata.labels or {}).items(): task.add_pod_label(key, value) if (annotations.get('volatile_component', 'false') == 'true'): task.execution_options.caching_strategy.max_cache_staleness = 'P0D' _attach_v2_specs(task, component_spec, original_arguments) return task
def _create_container_op_from_component_and_arguments(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)], component_ref: Optional[_structures.ComponentReference]=None) -> _container_op.ContainerOp: 'Instantiates ContainerOp object.\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n component_ref: (only for v1) The component references.\n\n Returns:\n A ContainerOp instance.\n ' arguments = arguments.copy() for input_spec in (component_spec.inputs or []): if ((input_spec.name not in arguments) and (input_spec.default is not None)): default_value = input_spec.default if (input_spec.type == 'Integer'): default_value = int(default_value) elif (input_spec.type == 'Float'): default_value = float(default_value) elif (type_utils.is_parameter_type(input_spec.type) and kfp.COMPILING_FOR_V2): parameter_type = type_utils.get_parameter_type(input_spec.type) default_value = type_utils.deserialize_parameter_value(value=default_value, parameter_type=parameter_type) arguments[input_spec.name] = default_value original_arguments = arguments arguments = arguments.copy() for (input_name, argument_value) in arguments.items(): if isinstance(argument_value, _pipeline_param.PipelineParam): input_type = component_spec._inputs_dict[input_name].type argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) arguments[input_name] = str(argument_value) if isinstance(argument_value, _container_op.ContainerOp): raise TypeError('ContainerOp object was passed to component as an input argument. 
Pass a single output instead.') placeholder_resolver = ExtraPlaceholderResolver() resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, placeholder_resolver=placeholder_resolver.resolve_placeholder) container_spec = component_spec.implementation.container old_warn_value = _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = True output_paths = collections.OrderedDict((resolved_cmd.output_paths or {})) output_paths.update(placeholder_resolver.output_paths) input_paths = collections.OrderedDict((resolved_cmd.input_paths or {})) input_paths.update(placeholder_resolver.input_paths) artifact_argument_paths = [dsl.InputArgumentPath(argument=arguments[input_name], input=input_name, path=path) for (input_name, path) in input_paths.items()] task = _container_op.ContainerOp(name=(component_spec.name or _components._default_component_name), image=container_spec.image, command=resolved_cmd.command, arguments=resolved_cmd.args, file_outputs=output_paths, artifact_argument_paths=artifact_argument_paths) _container_op.ContainerOp._DISABLE_REUSABLE_COMPONENT_WARNING = old_warn_value component_meta = copy.copy(component_spec) task._set_metadata(component_meta, original_arguments) if component_ref: component_ref_without_spec = copy.copy(component_ref) component_ref_without_spec.spec = None task._component_ref = component_ref_without_spec task._parameter_arguments = resolved_cmd.inputs_consumed_by_value name_to_spec_type = {} if component_meta.inputs: name_to_spec_type = {input.name: {'type': input.type, 'default': input.default} for input in component_meta.inputs} if kfp.COMPILING_FOR_V2: for (name, spec_type) in name_to_spec_type.items(): if ((name in original_arguments) and type_utils.is_parameter_type(spec_type['type'])): if isinstance(original_arguments[name], (list, dict)): task._parameter_arguments[name] = json.dumps(original_arguments[name]) else: 
task._parameter_arguments[name] = str(original_arguments[name]) for name in list(task.artifact_arguments.keys()): if (name in task._parameter_arguments): del task.artifact_arguments[name] for name in list(task.input_artifact_paths.keys()): if (name in task._parameter_arguments): del task.input_artifact_paths[name] output_names = [output_spec.name for output_spec in (component_spec.outputs or [])] output_name_to_python = _naming.generate_unique_name_conversion_table(output_names, _naming._sanitize_python_function_name) for output_name in output_names: pythonic_output_name = output_name_to_python[output_name] if ((pythonic_output_name not in task.outputs) and (output_name in task.outputs)): task.outputs[pythonic_output_name] = task.outputs[output_name] if container_spec.env: from kubernetes import client as k8s_client for (name, value) in container_spec.env.items(): task.container.add_env_variable(k8s_client.V1EnvVar(name=name, value=value)) if component_spec.metadata: annotations = (component_spec.metadata.annotations or {}) for (key, value) in annotations.items(): task.add_pod_annotation(key, value) for (key, value) in (component_spec.metadata.labels or {}).items(): task.add_pod_label(key, value) if (annotations.get('volatile_component', 'false') == 'true'): task.execution_options.caching_strategy.max_cache_staleness = 'P0D' _attach_v2_specs(task, component_spec, original_arguments) return task<|docstring|>Instantiates ContainerOp object. Args: component_spec: The component spec object. arguments: The dictionary of component arguments. component_ref: (only for v1) The component references. Returns: A ContainerOp instance.<|endoftext|>
141eea0c3985462ce9a4d5ae97dc422846fcde877aecb05093559b15ee03dfa2
def _attach_v2_specs(task: _container_op.ContainerOp, component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> None: "Attaches v2 specs to a ContainerOp object.\n\n Attach v2_specs to the ContainerOp object regardless whether the pipeline is\n being compiled to v1 (Argo yaml) or v2 (IR json).\n However, there're different behaviors for the two cases. Namely, resolved\n commands and arguments, error handling, etc.\n Regarding the difference in error handling, v2 has a stricter requirement on\n input type annotation. For instance, an input without any type annotation is\n viewed as an artifact, and if it's paired with InputValuePlaceholder, an\n error will be thrown at compile time. However, we cannot raise such an error\n in v1, as it wouldn't break existing pipelines.\n\n Args:\n task: The ContainerOp object to attach IR specs.\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n " def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths: 'Resolves the command line argument placeholders for v2 (IR).\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n\n Returns:\n A named tuple: _components._ResolvedCommandLineAndPaths.\n ' inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])} def _input_artifact_uri_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return _generate_input_uri_placeholder(input_key) def _input_artifact_path_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and 
type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key) def _input_parameter_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))): raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.parameters['{}']}}}}".format(input_key) def _output_artifact_uri_placeholder(output_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)): raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type)) else: return _generate_output_uri_placeholder(output_key) def _output_artifact_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key) def _output_parameter_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key) def _resolve_output_path_placeholder(output_key: str) -> str: if type_utils.is_parameter_type(outputs_dict[output_key].type): return _output_parameter_path_placeholder(output_key) else: return _output_artifact_path_placeholder(output_key) placeholder_resolver = ExtraPlaceholderResolver() def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str: inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} if isinstance(arg, _structures.InputValuePlaceholder): input_name = arg.input_name input_value = arguments.get(input_name, None) if (input_value is not None): return _input_parameter_placeholder(input_name) else: input_spec = inputs_dict[input_name] if 
input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.InputUriPlaceholder): input_name = arg.input_name if (input_name in arguments): input_uri = _input_artifact_uri_placeholder(input_name) return input_uri else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.OutputUriPlaceholder): output_name = arg.output_name output_uri = _output_artifact_uri_placeholder(output_name) return output_uri return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments) resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2) return resolved_cmd pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() arguments = arguments.copy() input_params_set = set([param for param in arguments.values() if isinstance(param, _pipeline_param.PipelineParam)]) for (input_name, argument_value) in arguments.items(): input_type = component_spec._inputs_dict[input_name].type argument_type = None if isinstance(argument_value, _pipeline_param.PipelineParam): argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) if ((argument_type is None) and isinstance(argument_value, (_for_loop.LoopArguments, _for_loop.LoopArgumentVariable))): argument_type = 'String' arguments[input_name] = str(argument_value) if type_utils.is_parameter_type(input_type): if argument_value.op_name: pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.producer_task = 
dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.output_parameter_key = argument_value.name else: pipeline_task_spec.inputs.parameters[input_name].component_input_parameter = argument_value.name elif argument_value.op_name: pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.producer_task = dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.output_artifact_key = argument_value.name elif isinstance(argument_value, str): argument_type = 'String' pipeline_params = _pipeline_param.extract_pipelineparams_from_any(argument_value) if (pipeline_params and kfp.COMPILING_FOR_V2): for param in pipeline_params: additional_input_name = dsl_component_spec.additional_input_name_for_pipelineparam(param) for (existing_input_name, _) in arguments.items(): if (existing_input_name == additional_input_name): raise ValueError('Name collision between existing input name {} and compiler injected input name {}'.format(existing_input_name, additional_input_name)) input_params_set.add(param) additional_input_placeholder = "{{{{$.inputs.parameters['{}']}}}}".format(additional_input_name) argument_value = argument_value.replace(param.pattern, additional_input_placeholder) if param.op_name: pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.producer_task = dsl_utils.sanitize_task_name(param.op_name) pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.output_parameter_key = param.name else: pipeline_task_spec.inputs.parameters[additional_input_name].component_input_parameter = param.full_name input_type = component_spec._inputs_dict[input_name].type if type_utils.is_parameter_type(input_type): pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.string_value = argument_value elif isinstance(argument_value, int): argument_type = 'Integer' 
pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, float): argument_type = 'Float' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, bool): argument_type = 'Bool' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.bool_value = argument_value elif isinstance(argument_value, list): argument_type = 'List' argument_value = map((lambda x: (str(x) if isinstance(x, dsl.PipelineParam) else x)), argument_value) pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.list_value.extend(argument_value) elif isinstance(argument_value, dict): argument_type = 'Dict' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.struct_value.update(argument_value) elif isinstance(argument_value, _container_op.ContainerOp): raise TypeError(f'ContainerOp object {input_name} was passed to component as an input argument. Pass a single output instead.') elif kfp.COMPILING_FOR_V2: raise NotImplementedError(f'Input argument supports only the following types: PipelineParam, str, int, float, bool, dict, and list. Got: "{argument_value}".') argument_is_parameter_type = type_utils.is_parameter_type(argument_type) input_is_parameter_type = type_utils.is_parameter_type(input_type) if (kfp.COMPILING_FOR_V2 and (argument_is_parameter_type != input_is_parameter_type)): if isinstance(argument_value, dsl.PipelineParam): param_or_value_msg = 'PipelineParam "{}"'.format(argument_value.full_name) else: param_or_value_msg = 'value "{}"'.format(argument_value) raise TypeError('Passing {param_or_value} with type "{arg_type}" (as "{arg_category}") to component input "{input_name}" with type "{input_type}" (as "{input_category}") is incompatible. 
Please fix the type of the component input.'.format(param_or_value=param_or_value_msg, arg_type=argument_type, arg_category=('Parameter' if argument_is_parameter_type else 'Artifact'), input_name=input_name, input_type=input_type, input_category=('Parameter' if input_is_parameter_type else 'Artifact'))) if (not component_spec.name): component_spec.name = _components._default_component_name resolved_cmd = _resolve_commands_and_args_v2(component_spec=component_spec, arguments=arguments) task.container_spec = pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(image=component_spec.implementation.container.image, command=resolved_cmd.command, args=resolved_cmd.args, env=[pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec.EnvVar(name=name, value=value) for (name, value) in task.container.env_dict.items()]) pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(task.name) executor_label = dsl_utils.sanitize_executor_label(task.name) task.component_spec = dsl_component_spec.build_component_spec_from_structure(component_spec, executor_label, arguments.keys()) task.task_spec = pipeline_task_spec if kfp.COMPILING_FOR_V2: task.command = resolved_cmd.command task.arguments = resolved_cmd.args task.inputs = list(input_params_set)
Attaches v2 specs to a ContainerOp object. Attach v2_specs to the ContainerOp object regardless whether the pipeline is being compiled to v1 (Argo yaml) or v2 (IR json). However, there're different behaviors for the two cases. Namely, resolved commands and arguments, error handling, etc. Regarding the difference in error handling, v2 has a stricter requirement on input type annotation. For instance, an input without any type annotation is viewed as an artifact, and if it's paired with InputValuePlaceholder, an error will be thrown at compile time. However, we cannot raise such an error in v1, as it wouldn't break existing pipelines. Args: task: The ContainerOp object to attach IR specs. component_spec: The component spec object. arguments: The dictionary of component arguments.
sdk/python/kfp/dsl/_component_bridge.py
_attach_v2_specs
aurora-opensource/kubeflow-pipelines
2,860
python
def _attach_v2_specs(task: _container_op.ContainerOp, component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> None: "Attaches v2 specs to a ContainerOp object.\n\n Attach v2_specs to the ContainerOp object regardless whether the pipeline is\n being compiled to v1 (Argo yaml) or v2 (IR json).\n However, there're different behaviors for the two cases. Namely, resolved\n commands and arguments, error handling, etc.\n Regarding the difference in error handling, v2 has a stricter requirement on\n input type annotation. For instance, an input without any type annotation is\n viewed as an artifact, and if it's paired with InputValuePlaceholder, an\n error will be thrown at compile time. However, we cannot raise such an error\n in v1, as it wouldn't break existing pipelines.\n\n Args:\n task: The ContainerOp object to attach IR specs.\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n " def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths: 'Resolves the command line argument placeholders for v2 (IR).\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n\n Returns:\n A named tuple: _components._ResolvedCommandLineAndPaths.\n ' inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])} def _input_artifact_uri_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return _generate_input_uri_placeholder(input_key) def _input_artifact_path_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and 
type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key) def _input_parameter_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))): raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.parameters['{}']}}}}".format(input_key) def _output_artifact_uri_placeholder(output_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)): raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type)) else: return _generate_output_uri_placeholder(output_key) def _output_artifact_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key) def _output_parameter_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key) def _resolve_output_path_placeholder(output_key: str) -> str: if type_utils.is_parameter_type(outputs_dict[output_key].type): return _output_parameter_path_placeholder(output_key) else: return _output_artifact_path_placeholder(output_key) placeholder_resolver = ExtraPlaceholderResolver() def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str: inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} if isinstance(arg, _structures.InputValuePlaceholder): input_name = arg.input_name input_value = arguments.get(input_name, None) if (input_value is not None): return _input_parameter_placeholder(input_name) else: input_spec = inputs_dict[input_name] if 
input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.InputUriPlaceholder): input_name = arg.input_name if (input_name in arguments): input_uri = _input_artifact_uri_placeholder(input_name) return input_uri else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.OutputUriPlaceholder): output_name = arg.output_name output_uri = _output_artifact_uri_placeholder(output_name) return output_uri return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments) resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2) return resolved_cmd pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() arguments = arguments.copy() input_params_set = set([param for param in arguments.values() if isinstance(param, _pipeline_param.PipelineParam)]) for (input_name, argument_value) in arguments.items(): input_type = component_spec._inputs_dict[input_name].type argument_type = None if isinstance(argument_value, _pipeline_param.PipelineParam): argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) if ((argument_type is None) and isinstance(argument_value, (_for_loop.LoopArguments, _for_loop.LoopArgumentVariable))): argument_type = 'String' arguments[input_name] = str(argument_value) if type_utils.is_parameter_type(input_type): if argument_value.op_name: pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.producer_task = 
dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.output_parameter_key = argument_value.name else: pipeline_task_spec.inputs.parameters[input_name].component_input_parameter = argument_value.name elif argument_value.op_name: pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.producer_task = dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.output_artifact_key = argument_value.name elif isinstance(argument_value, str): argument_type = 'String' pipeline_params = _pipeline_param.extract_pipelineparams_from_any(argument_value) if (pipeline_params and kfp.COMPILING_FOR_V2): for param in pipeline_params: additional_input_name = dsl_component_spec.additional_input_name_for_pipelineparam(param) for (existing_input_name, _) in arguments.items(): if (existing_input_name == additional_input_name): raise ValueError('Name collision between existing input name {} and compiler injected input name {}'.format(existing_input_name, additional_input_name)) input_params_set.add(param) additional_input_placeholder = "{{{{$.inputs.parameters['{}']}}}}".format(additional_input_name) argument_value = argument_value.replace(param.pattern, additional_input_placeholder) if param.op_name: pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.producer_task = dsl_utils.sanitize_task_name(param.op_name) pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.output_parameter_key = param.name else: pipeline_task_spec.inputs.parameters[additional_input_name].component_input_parameter = param.full_name input_type = component_spec._inputs_dict[input_name].type if type_utils.is_parameter_type(input_type): pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.string_value = argument_value elif isinstance(argument_value, int): argument_type = 'Integer' 
pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, float): argument_type = 'Float' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, bool): argument_type = 'Bool' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.bool_value = argument_value elif isinstance(argument_value, list): argument_type = 'List' argument_value = map((lambda x: (str(x) if isinstance(x, dsl.PipelineParam) else x)), argument_value) pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.list_value.extend(argument_value) elif isinstance(argument_value, dict): argument_type = 'Dict' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.struct_value.update(argument_value) elif isinstance(argument_value, _container_op.ContainerOp): raise TypeError(f'ContainerOp object {input_name} was passed to component as an input argument. Pass a single output instead.') elif kfp.COMPILING_FOR_V2: raise NotImplementedError(f'Input argument supports only the following types: PipelineParam, str, int, float, bool, dict, and list. Got: "{argument_value}".') argument_is_parameter_type = type_utils.is_parameter_type(argument_type) input_is_parameter_type = type_utils.is_parameter_type(input_type) if (kfp.COMPILING_FOR_V2 and (argument_is_parameter_type != input_is_parameter_type)): if isinstance(argument_value, dsl.PipelineParam): param_or_value_msg = 'PipelineParam "{}"'.format(argument_value.full_name) else: param_or_value_msg = 'value "{}"'.format(argument_value) raise TypeError('Passing {param_or_value} with type "{arg_type}" (as "{arg_category}") to component input "{input_name}" with type "{input_type}" (as "{input_category}") is incompatible. 
Please fix the type of the component input.'.format(param_or_value=param_or_value_msg, arg_type=argument_type, arg_category=('Parameter' if argument_is_parameter_type else 'Artifact'), input_name=input_name, input_type=input_type, input_category=('Parameter' if input_is_parameter_type else 'Artifact'))) if (not component_spec.name): component_spec.name = _components._default_component_name resolved_cmd = _resolve_commands_and_args_v2(component_spec=component_spec, arguments=arguments) task.container_spec = pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(image=component_spec.implementation.container.image, command=resolved_cmd.command, args=resolved_cmd.args, env=[pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec.EnvVar(name=name, value=value) for (name, value) in task.container.env_dict.items()]) pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(task.name) executor_label = dsl_utils.sanitize_executor_label(task.name) task.component_spec = dsl_component_spec.build_component_spec_from_structure(component_spec, executor_label, arguments.keys()) task.task_spec = pipeline_task_spec if kfp.COMPILING_FOR_V2: task.command = resolved_cmd.command task.arguments = resolved_cmd.args task.inputs = list(input_params_set)
def _attach_v2_specs(task: _container_op.ContainerOp, component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> None: "Attaches v2 specs to a ContainerOp object.\n\n Attach v2_specs to the ContainerOp object regardless whether the pipeline is\n being compiled to v1 (Argo yaml) or v2 (IR json).\n However, there're different behaviors for the two cases. Namely, resolved\n commands and arguments, error handling, etc.\n Regarding the difference in error handling, v2 has a stricter requirement on\n input type annotation. For instance, an input without any type annotation is\n viewed as an artifact, and if it's paired with InputValuePlaceholder, an\n error will be thrown at compile time. However, we cannot raise such an error\n in v1, as it wouldn't break existing pipelines.\n\n Args:\n task: The ContainerOp object to attach IR specs.\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n " def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths: 'Resolves the command line argument placeholders for v2 (IR).\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n\n Returns:\n A named tuple: _components._ResolvedCommandLineAndPaths.\n ' inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])} def _input_artifact_uri_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return _generate_input_uri_placeholder(input_key) def _input_artifact_path_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and 
type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key) def _input_parameter_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))): raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.parameters['{}']}}}}".format(input_key) def _output_artifact_uri_placeholder(output_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)): raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type)) else: return _generate_output_uri_placeholder(output_key) def _output_artifact_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key) def _output_parameter_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key) def _resolve_output_path_placeholder(output_key: str) -> str: if type_utils.is_parameter_type(outputs_dict[output_key].type): return _output_parameter_path_placeholder(output_key) else: return _output_artifact_path_placeholder(output_key) placeholder_resolver = ExtraPlaceholderResolver() def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str: inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} if isinstance(arg, _structures.InputValuePlaceholder): input_name = arg.input_name input_value = arguments.get(input_name, None) if (input_value is not None): return _input_parameter_placeholder(input_name) else: input_spec = inputs_dict[input_name] if 
input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.InputUriPlaceholder): input_name = arg.input_name if (input_name in arguments): input_uri = _input_artifact_uri_placeholder(input_name) return input_uri else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.OutputUriPlaceholder): output_name = arg.output_name output_uri = _output_artifact_uri_placeholder(output_name) return output_uri return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments) resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2) return resolved_cmd pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() arguments = arguments.copy() input_params_set = set([param for param in arguments.values() if isinstance(param, _pipeline_param.PipelineParam)]) for (input_name, argument_value) in arguments.items(): input_type = component_spec._inputs_dict[input_name].type argument_type = None if isinstance(argument_value, _pipeline_param.PipelineParam): argument_type = argument_value.param_type types.verify_type_compatibility(argument_type, input_type, 'Incompatible argument passed to the input "{}" of component "{}": '.format(input_name, component_spec.name)) if ((argument_type is None) and isinstance(argument_value, (_for_loop.LoopArguments, _for_loop.LoopArgumentVariable))): argument_type = 'String' arguments[input_name] = str(argument_value) if type_utils.is_parameter_type(input_type): if argument_value.op_name: pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.producer_task = 
dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.parameters[input_name].task_output_parameter.output_parameter_key = argument_value.name else: pipeline_task_spec.inputs.parameters[input_name].component_input_parameter = argument_value.name elif argument_value.op_name: pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.producer_task = dsl_utils.sanitize_task_name(argument_value.op_name) pipeline_task_spec.inputs.artifacts[input_name].task_output_artifact.output_artifact_key = argument_value.name elif isinstance(argument_value, str): argument_type = 'String' pipeline_params = _pipeline_param.extract_pipelineparams_from_any(argument_value) if (pipeline_params and kfp.COMPILING_FOR_V2): for param in pipeline_params: additional_input_name = dsl_component_spec.additional_input_name_for_pipelineparam(param) for (existing_input_name, _) in arguments.items(): if (existing_input_name == additional_input_name): raise ValueError('Name collision between existing input name {} and compiler injected input name {}'.format(existing_input_name, additional_input_name)) input_params_set.add(param) additional_input_placeholder = "{{{{$.inputs.parameters['{}']}}}}".format(additional_input_name) argument_value = argument_value.replace(param.pattern, additional_input_placeholder) if param.op_name: pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.producer_task = dsl_utils.sanitize_task_name(param.op_name) pipeline_task_spec.inputs.parameters[additional_input_name].task_output_parameter.output_parameter_key = param.name else: pipeline_task_spec.inputs.parameters[additional_input_name].component_input_parameter = param.full_name input_type = component_spec._inputs_dict[input_name].type if type_utils.is_parameter_type(input_type): pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.string_value = argument_value elif isinstance(argument_value, int): argument_type = 'Integer' 
pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, float): argument_type = 'Float' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, bool): argument_type = 'Bool' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.bool_value = argument_value elif isinstance(argument_value, list): argument_type = 'List' argument_value = map((lambda x: (str(x) if isinstance(x, dsl.PipelineParam) else x)), argument_value) pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.list_value.extend(argument_value) elif isinstance(argument_value, dict): argument_type = 'Dict' pipeline_task_spec.inputs.parameters[input_name].runtime_value.constant.struct_value.update(argument_value) elif isinstance(argument_value, _container_op.ContainerOp): raise TypeError(f'ContainerOp object {input_name} was passed to component as an input argument. Pass a single output instead.') elif kfp.COMPILING_FOR_V2: raise NotImplementedError(f'Input argument supports only the following types: PipelineParam, str, int, float, bool, dict, and list. Got: "{argument_value}".') argument_is_parameter_type = type_utils.is_parameter_type(argument_type) input_is_parameter_type = type_utils.is_parameter_type(input_type) if (kfp.COMPILING_FOR_V2 and (argument_is_parameter_type != input_is_parameter_type)): if isinstance(argument_value, dsl.PipelineParam): param_or_value_msg = 'PipelineParam "{}"'.format(argument_value.full_name) else: param_or_value_msg = 'value "{}"'.format(argument_value) raise TypeError('Passing {param_or_value} with type "{arg_type}" (as "{arg_category}") to component input "{input_name}" with type "{input_type}" (as "{input_category}") is incompatible. 
Please fix the type of the component input.'.format(param_or_value=param_or_value_msg, arg_type=argument_type, arg_category=('Parameter' if argument_is_parameter_type else 'Artifact'), input_name=input_name, input_type=input_type, input_category=('Parameter' if input_is_parameter_type else 'Artifact'))) if (not component_spec.name): component_spec.name = _components._default_component_name resolved_cmd = _resolve_commands_and_args_v2(component_spec=component_spec, arguments=arguments) task.container_spec = pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(image=component_spec.implementation.container.image, command=resolved_cmd.command, args=resolved_cmd.args, env=[pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec.EnvVar(name=name, value=value) for (name, value) in task.container.env_dict.items()]) pipeline_task_spec.component_ref.name = dsl_utils.sanitize_component_name(task.name) executor_label = dsl_utils.sanitize_executor_label(task.name) task.component_spec = dsl_component_spec.build_component_spec_from_structure(component_spec, executor_label, arguments.keys()) task.task_spec = pipeline_task_spec if kfp.COMPILING_FOR_V2: task.command = resolved_cmd.command task.arguments = resolved_cmd.args task.inputs = list(input_params_set)<|docstring|>Attaches v2 specs to a ContainerOp object. Attach v2_specs to the ContainerOp object regardless whether the pipeline is being compiled to v1 (Argo yaml) or v2 (IR json). However, there're different behaviors for the two cases. Namely, resolved commands and arguments, error handling, etc. Regarding the difference in error handling, v2 has a stricter requirement on input type annotation. For instance, an input without any type annotation is viewed as an artifact, and if it's paired with InputValuePlaceholder, an error will be thrown at compile time. However, we cannot raise such an error in v1, as it wouldn't break existing pipelines. Args: task: The ContainerOp object to attach IR specs. 
component_spec: The component spec object. arguments: The dictionary of component arguments.<|endoftext|>
34f9ae904696ba769810659b2cec0e4b1d34f7d69489110bc2c0aee31006c2b5
def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths:
    """Resolves the command line argument placeholders for v2 (IR).

    Args:
        component_spec: The component spec object.
        arguments: The dictionary of component arguments.

    Returns:
        A named tuple: _components._ResolvedCommandLineAndPaths.
    """
    # Name -> spec lookup tables used by the type checks in the nested helpers.
    inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])}
    outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])}

    def _input_artifact_uri_placeholder(input_key: str) -> str:
        # v2 (IR) compilation forbids pairing a parameter-typed input with
        # InputUriPlaceholder; v1 compilation tolerates it.
        if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)):
            raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type))
        else:
            return _generate_input_uri_placeholder(input_key)

    def _input_artifact_path_placeholder(input_key: str) -> str:
        # Same restriction as above, for InputPathPlaceholder.
        if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)):
            raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type))
        else:
            return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key)

    def _input_parameter_placeholder(input_key: str) -> str:
        # Converse restriction: an artifact-typed input cannot use
        # InputValuePlaceholder under v2.
        if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))):
            raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type))
        else:
            return "{{{{$.inputs.parameters['{}']}}}}".format(input_key)

    def _output_artifact_uri_placeholder(output_key: str) -> str:
        # Parameter-typed outputs may not use OutputUriPlaceholder under v2.
        if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)):
            raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type))
        else:
            return _generate_output_uri_placeholder(output_key)

    def _output_artifact_path_placeholder(output_key: str) -> str:
        return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key)

    def _output_parameter_path_placeholder(output_key: str) -> str:
        return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key)

    def _resolve_output_path_placeholder(output_key: str) -> str:
        # Dispatch on the declared output type: parameters resolve to an
        # output-file path, artifacts to the artifact's local path.
        if type_utils.is_parameter_type(outputs_dict[output_key].type):
            return _output_parameter_path_placeholder(output_key)
        else:
            return _output_artifact_path_placeholder(output_key)

    placeholder_resolver = ExtraPlaceholderResolver()

    def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str:
        # NOTE: deliberately shadows the outer inputs_dict -- this callback is
        # handed its own component_spec by the generic resolver.
        inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])}
        if isinstance(arg, _structures.InputValuePlaceholder):
            input_name = arg.input_name
            input_value = arguments.get(input_name, None)
            if (input_value is not None):
                return _input_parameter_placeholder(input_name)
            else:
                input_spec = inputs_dict[input_name]
                # Optional inputs with no supplied value drop the placeholder.
                if input_spec.optional:
                    return None
                else:
                    raise ValueError('No value provided for input {}'.format(input_name))
        elif isinstance(arg, _structures.InputUriPlaceholder):
            input_name = arg.input_name
            if (input_name in arguments):
                input_uri = _input_artifact_uri_placeholder(input_name)
                return input_uri
            else:
                input_spec = inputs_dict[input_name]
                if input_spec.optional:
                    return None
                else:
                    raise ValueError('No value provided for input {}'.format(input_name))
        elif isinstance(arg, _structures.OutputUriPlaceholder):
            output_name = arg.output_name
            output_uri = _output_artifact_uri_placeholder(output_name)
            return output_uri
        # Anything else is delegated to the extra resolver.
        return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments)

    resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2)
    return resolved_cmd
Resolves the command line argument placeholders for v2 (IR). Args: component_spec: The component spec object. arguments: The dictionary of component arguments. Returns: A named tuple: _components._ResolvedCommandLineAndPaths.
sdk/python/kfp/dsl/_component_bridge.py
_resolve_commands_and_args_v2
aurora-opensource/kubeflow-pipelines
2,860
python
def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths: 'Resolves the command line argument placeholders for v2 (IR).\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n\n Returns:\n A named tuple: _components._ResolvedCommandLineAndPaths.\n ' inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])} def _input_artifact_uri_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return _generate_input_uri_placeholder(input_key) def _input_artifact_path_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key) def _input_parameter_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))): raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.parameters['{}']}}}}".format(input_key) def _output_artifact_uri_placeholder(output_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)): raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type)) else: return _generate_output_uri_placeholder(output_key) def 
_output_artifact_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key) def _output_parameter_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key) def _resolve_output_path_placeholder(output_key: str) -> str: if type_utils.is_parameter_type(outputs_dict[output_key].type): return _output_parameter_path_placeholder(output_key) else: return _output_artifact_path_placeholder(output_key) placeholder_resolver = ExtraPlaceholderResolver() def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str: inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} if isinstance(arg, _structures.InputValuePlaceholder): input_name = arg.input_name input_value = arguments.get(input_name, None) if (input_value is not None): return _input_parameter_placeholder(input_name) else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.InputUriPlaceholder): input_name = arg.input_name if (input_name in arguments): input_uri = _input_artifact_uri_placeholder(input_name) return input_uri else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.OutputUriPlaceholder): output_name = arg.output_name output_uri = _output_artifact_uri_placeholder(output_name) return output_uri return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments) resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2) 
return resolved_cmd
def _resolve_commands_and_args_v2(component_spec: _structures.ComponentSpec, arguments: Mapping[(str, Any)]) -> _components._ResolvedCommandLineAndPaths: 'Resolves the command line argument placeholders for v2 (IR).\n\n Args:\n component_spec: The component spec object.\n arguments: The dictionary of component arguments.\n\n Returns:\n A named tuple: _components._ResolvedCommandLineAndPaths.\n ' inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} outputs_dict = {output_spec.name: output_spec for output_spec in (component_spec.outputs or [])} def _input_artifact_uri_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputUriPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return _generate_input_uri_placeholder(input_key) def _input_artifact_path_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(inputs_dict[input_key].type)): raise TypeError('Input "{}" with type "{}" cannot be paired with InputPathPlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.artifacts['{}'].path}}}}".format(input_key) def _input_parameter_placeholder(input_key: str) -> str: if (kfp.COMPILING_FOR_V2 and (not type_utils.is_parameter_type(inputs_dict[input_key].type))): raise TypeError('Input "{}" with type "{}" cannot be paired with InputValuePlaceholder.'.format(input_key, inputs_dict[input_key].type)) else: return "{{{{$.inputs.parameters['{}']}}}}".format(input_key) def _output_artifact_uri_placeholder(output_key: str) -> str: if (kfp.COMPILING_FOR_V2 and type_utils.is_parameter_type(outputs_dict[output_key].type)): raise TypeError('Output "{}" with type "{}" cannot be paired with OutputUriPlaceholder.'.format(output_key, outputs_dict[output_key].type)) else: return _generate_output_uri_placeholder(output_key) def 
_output_artifact_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.artifacts['{}'].path}}}}".format(output_key) def _output_parameter_path_placeholder(output_key: str) -> str: return "{{{{$.outputs.parameters['{}'].output_file}}}}".format(output_key) def _resolve_output_path_placeholder(output_key: str) -> str: if type_utils.is_parameter_type(outputs_dict[output_key].type): return _output_parameter_path_placeholder(output_key) else: return _output_artifact_path_placeholder(output_key) placeholder_resolver = ExtraPlaceholderResolver() def _resolve_ir_placeholders_v2(arg, component_spec: _structures.ComponentSpec, arguments: dict) -> str: inputs_dict = {input_spec.name: input_spec for input_spec in (component_spec.inputs or [])} if isinstance(arg, _structures.InputValuePlaceholder): input_name = arg.input_name input_value = arguments.get(input_name, None) if (input_value is not None): return _input_parameter_placeholder(input_name) else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.InputUriPlaceholder): input_name = arg.input_name if (input_name in arguments): input_uri = _input_artifact_uri_placeholder(input_name) return input_uri else: input_spec = inputs_dict[input_name] if input_spec.optional: return None else: raise ValueError('No value provided for input {}'.format(input_name)) elif isinstance(arg, _structures.OutputUriPlaceholder): output_name = arg.output_name output_uri = _output_artifact_uri_placeholder(output_name) return output_uri return placeholder_resolver.resolve_placeholder(arg=arg, component_spec=component_spec, arguments=arguments) resolved_cmd = _components._resolve_command_line_and_paths(component_spec=component_spec, arguments=arguments, input_path_generator=_input_artifact_path_placeholder, output_path_generator=_resolve_output_path_placeholder, placeholder_resolver=_resolve_ir_placeholders_v2) 
return resolved_cmd<|docstring|>Resolves the command line argument placeholders for v2 (IR). Args: component_spec: The component spec object. arguments: The dictionary of component arguments. Returns: A named tuple: _components._ResolvedCommandLineAndPaths.<|endoftext|>
bda7c9ad1e6dbc93c4c8be435df382515d9353268254d77d10b2ed771b2d0829
def find_all_event_files(dir_path):
    """Recursively collect all event files under `dir_path`.

    A file counts as an event file when its name contains the
    substring 'events'.

    :param dir_path: directory path
    :type dir_path: str
    :return: list of file paths.
    """
    matches = []
    for root, _dirs, names in os.walk(dir_path):
        matches.extend(
            os.path.join(root, name) for name in names if 'events' in name
        )
    return matches
find all event files in directory `dir_path`. :param dir_path: directory path :type dir_path: str :return: list of file path.
tb_paddle/summary_reader.py
find_all_event_files
GT-AcerZhang/tb-paddle
0
python
def find_all_event_files(dir_path): 'find all event files in directory `dir_path`.\n \n :param dir_path: directory path\n :type dir_path: str\n :return: list of file path. \n ' file_path_list = [] for (root, dirs, files) in os.walk(dir_path): for file_name in files: if ('events' in file_name): file_path_list.append(os.path.join(root, file_name)) return file_path_list
def find_all_event_files(dir_path): 'find all event files in directory `dir_path`.\n \n :param dir_path: directory path\n :type dir_path: str\n :return: list of file path. \n ' file_path_list = [] for (root, dirs, files) in os.walk(dir_path): for file_name in files: if ('events' in file_name): file_path_list.append(os.path.join(root, file_name)) return file_path_list<|docstring|>find all event files in directory `dir_path`. :param dir_path: directory path :type dir_path: str :return: list of file path.<|endoftext|>
00184cba51e89ff8c27a8f67087c33773bf976bcf52b62e3cb1779383705e45a
def __init__(self, parse_dir):
    """Remember the directory containing the events files to parse.

    :param parse_dir: the directory of events file.
    :type parse_dir: str
    """
    self._parse_dir = parse_dir
:param parse_dir: the directory of events file. :type parse_dir: str
tb_paddle/summary_reader.py
__init__
GT-AcerZhang/tb-paddle
0
python
def __init__(self, parse_dir): '\n :param parse_dir: the directory of events file.\n :type parse_dir: str\n ' self._parse_dir = parse_dir
def __init__(self, parse_dir): '\n :param parse_dir: the directory of events file.\n :type parse_dir: str\n ' self._parse_dir = parse_dir<|docstring|>:param parse_dir: the directory of events file. :type parse_dir: str<|endoftext|>
f7a422b2e28b122d33340f6215ed09bf7394c1bc8b3d0aa63fe7df192c3d6e39
def get_scalar(self, tag):
    """Collect every scalar record logged under `tag`.

    :param tag: tag of scalar data.
    :type tag: str
    :return: a list of tuple, each element is (tag, step, scalar_value)
    """
    records = []
    for event_path in find_all_event_files(self._parse_dir):
        loader = EventFileLoader(event_path)
        for event in loader.Load():
            # Skip events that carry no summary values (e.g. file headers).
            if not event.summary.value:
                continue
            for value in event.summary.value:
                if value.tag == tag:
                    records.append((value.tag, event.step, value.simple_value))
    return records
:param tag: tag of scalar data. :type tag: str :return: a list of tuple, each element is (tag, step, scalar_value)
tb_paddle/summary_reader.py
get_scalar
GT-AcerZhang/tb-paddle
0
python
def get_scalar(self, tag): '\n :param tag: tag of scalar data.\n :type tag: str\n :return: a list of tuple, each element is (tag, step, scalar_value)\n ' event_file_list = find_all_event_files(self._parse_dir) result = [] for event_file in event_file_list: event_file_loader = EventFileLoader(event_file) for event_str in event_file_loader.Load(): if event_str.summary.value: for item in event_str.summary.value: if (item.tag == tag): result.append((item.tag, event_str.step, item.simple_value)) return result
def get_scalar(self, tag): '\n :param tag: tag of scalar data.\n :type tag: str\n :return: a list of tuple, each element is (tag, step, scalar_value)\n ' event_file_list = find_all_event_files(self._parse_dir) result = [] for event_file in event_file_list: event_file_loader = EventFileLoader(event_file) for event_str in event_file_loader.Load(): if event_str.summary.value: for item in event_str.summary.value: if (item.tag == tag): result.append((item.tag, event_str.step, item.simple_value)) return result<|docstring|>:param tag: tag of scalar data. :type tag: str :return: a list of tuple, each element is (tag, step, scalar_value)<|endoftext|>
5fab608d063664d145e2ee66db3d3d2b0c18bd623675aef8cfaae623db3ce099
def set_class_list(class_list_file):
    """
    User function: Get class list from file

    Args:
        class_list_file (str): Path to file containing all class names

    Returns:
        None
    """
    # Use a context manager so the file handle is released even if
    # reading raises (the original left the file open on error).
    with open(class_list_file, 'r') as f:
        # NOTE(review): readlines() keeps trailing newlines; preserved here
        # for backward compatibility -- downstream presumably strips them.
        class_names = f.readlines()
    # Index 0 is reserved for the background class in RCNN-style detectors.
    system_dict['classes'] = ['__background__']
    system_dict['classes'] += class_names
    system_dict['rcnn_num_classes'] = len(system_dict['classes'])
User function: Get class list from file Args: class_list_file (str): Path to file containing all class names Returns: None
3_mxrcnn/lib/infer_base.py
set_class_list
Boltuzamaki/Monk_Object_Detection
549
python
def set_class_list(class_list_file): '\n User function: Get class list from file\n\n Args:\n class_list_file (str): Path to file containing all class names\n\n Returns:\n None\n ' f = open(class_list_file, 'r') system_dict['classes'] = ['__background__'] system_dict['classes'] += f.readlines() f.close() system_dict['rcnn_num_classes'] = len(system_dict['classes'])
def set_class_list(class_list_file): '\n User function: Get class list from file\n\n Args:\n class_list_file (str): Path to file containing all class names\n\n Returns:\n None\n ' f = open(class_list_file, 'r') system_dict['classes'] = ['__background__'] system_dict['classes'] += f.readlines() f.close() system_dict['rcnn_num_classes'] = len(system_dict['classes'])<|docstring|>User function: Get class list from file Args: class_list_file (str): Path to file containing all class names Returns: None<|endoftext|>
cd4bb3bffb2fc6b25679857cb90487f3d2381c58737bbfe1aad097e6df4f9218
def set_model_params(model_name='vgg16', model_path=None):
    """
    User function: Set model parameters

    Available models
        vgg16
        resnet50
        resnet101

    Args:
        model_name (str): Select from available models
        model_path (str): Path to model file

    Returns:
        None
    """
    system_dict['network'] = model_name
    system_dict['params'] = model_path
    # Backbone-specific RCNN head settings. The list/tuple values are stored
    # as strings first and parsed into Python objects below.
    if model_name == 'vgg16':
        system_dict['rcnn_feat_stride'] = 16
        system_dict['rcnn_pooled_size'] = '(7, 7)'
        system_dict['net_fixed_params'] = '["conv1", "conv2", "conv3", "conv4"]'
    elif model_name in ('resnet50', 'resnet101'):
        system_dict['rcnn_feat_stride'] = 16
        system_dict['rcnn_pooled_size'] = '(14, 14)'
        system_dict['net_fixed_params'] = '["conv0", "stage1", "gamma", "beta"]'
    system_dict['net_fixed_params'] = ast.literal_eval(system_dict['net_fixed_params'])
User function: Set model parameters Available models vgg16 resnet50 resnet101 Args: model_name (str): Select from available models model_path (str): Path to model file Returns: None
3_mxrcnn/lib/infer_base.py
set_model_params
Boltuzamaki/Monk_Object_Detection
549
python
def set_model_params(model_name='vgg16', model_path=None): '\n User function: Set model parameters\n\n Available models\n vgg16\n resnet50\n resnet101\n\n Args:\n model_name (str): Select from available models\n model_path (str): Path to model file\n\n Returns:\n None\n ' system_dict['network'] = model_name system_dict['params'] = model_path if (model_name == 'vgg16'): system_dict['rcnn_feat_stride'] = 16 system_dict['rcnn_pooled_size'] = '(7, 7)' system_dict['net_fixed_params'] = '["conv1", "conv2", "conv3", "conv4"]' elif ((model_name == 'resnet50') or (model_name == 'resnet101')): system_dict['rcnn_feat_stride'] = 16 system_dict['rcnn_pooled_size'] = '(14, 14)' system_dict['net_fixed_params'] = '["conv0", "stage1", "gamma", "beta"]' system_dict['net_fixed_params'] = ast.literal_eval(system_dict['net_fixed_params'])
def set_model_params(model_name='vgg16', model_path=None): '\n User function: Set model parameters\n\n Available models\n vgg16\n resnet50\n resnet101\n\n Args:\n model_name (str): Select from available models\n model_path (str): Path to model file\n\n Returns:\n None\n ' system_dict['network'] = model_name system_dict['params'] = model_path if (model_name == 'vgg16'): system_dict['rcnn_feat_stride'] = 16 system_dict['rcnn_pooled_size'] = '(7, 7)' system_dict['net_fixed_params'] = '["conv1", "conv2", "conv3", "conv4"]' elif ((model_name == 'resnet50') or (model_name == 'resnet101')): system_dict['rcnn_feat_stride'] = 16 system_dict['rcnn_pooled_size'] = '(14, 14)' system_dict['net_fixed_params'] = '["conv0", "stage1", "gamma", "beta"]' system_dict['net_fixed_params'] = ast.literal_eval(system_dict['net_fixed_params'])<|docstring|>User function: Set model parameters Available models vgg16 resnet50 resnet101 Args: model_name (str): Select from available models model_path (str): Path to model file Returns: None<|endoftext|>
88c0ef6f9e3e504251bdfd816ba003a1d0c1e2dec6de4d7322f84dd4c56478f1
def set_img_preproc_params(img_short_side=600, img_long_side=1000, mean=(123.68, 116.779, 103.939), std=(1.0, 1.0, 1.0)):
    """
    User function: Set image preprocessing parameters

    Args:
        img_short_side (int): Minimum image size for rescaling
        img_long_side (int): Maximum image size for rescaling
        mean (tuple): 3-Channel mean for subtraction in preprocessing
        std (tuple): 3-Channel standard deviation for normalizing in preprocessing

    Returns:
        None
    """
    system_dict['img_short_side'] = img_short_side
    system_dict['img_long_side'] = img_long_side
    # The str()/literal_eval round trip mirrors the original behaviour:
    # any repr-able literal sequence is normalized into a Python object.
    for key, value in (('img_pixel_means', mean), ('img_pixel_stds', std)):
        system_dict[key] = str(value)
        system_dict[key] = ast.literal_eval(system_dict[key])
User function: Set image preprocessing parameters Args: img_short_side (int): Minimum image size for rescaling img_long_side (int): Maximum image size for rescaling mean (tuple): 3-Channel mean for subtraction in preprocessing std (tuple): 3-Channel standard deviation for normalizing in preprocessing Returns: None
3_mxrcnn/lib/infer_base.py
set_img_preproc_params
Boltuzamaki/Monk_Object_Detection
549
python
def set_img_preproc_params(img_short_side=600, img_long_side=1000, mean=(123.68, 116.779, 103.939), std=(1.0, 1.0, 1.0)): '\n User function: Set image preprocessing parameters\n\n Args:\n img_short_side (int): Minimum image size for rescaling\n img_long_side (int): Maximum image size for rescaling\n mean (tuple): 3-Channel mean for subtraction in preprocessing\n std (tuple): 3-Channel standard deviation for normalizing in preprocessing\n\n Returns:\n None\n ' system_dict['img_short_side'] = img_short_side system_dict['img_long_side'] = img_long_side system_dict['img_pixel_means'] = str(mean) system_dict['img_pixel_stds'] = str(std) system_dict['img_pixel_means'] = ast.literal_eval(system_dict['img_pixel_means']) system_dict['img_pixel_stds'] = ast.literal_eval(system_dict['img_pixel_stds'])
def set_img_preproc_params(img_short_side=600, img_long_side=1000, mean=(123.68, 116.779, 103.939), std=(1.0, 1.0, 1.0)): '\n User function: Set image preprocessing parameters\n\n Args:\n img_short_side (int): Minimum image size for rescaling\n img_long_side (int): Maximum image size for rescaling\n mean (tuple): 3-Channel mean for subtraction in preprocessing\n std (tuple): 3-Channel standard deviation for normalizing in preprocessing\n\n Returns:\n None\n ' system_dict['img_short_side'] = img_short_side system_dict['img_long_side'] = img_long_side system_dict['img_pixel_means'] = str(mean) system_dict['img_pixel_stds'] = str(std) system_dict['img_pixel_means'] = ast.literal_eval(system_dict['img_pixel_means']) system_dict['img_pixel_stds'] = ast.literal_eval(system_dict['img_pixel_stds'])<|docstring|>User function: Set image preprocessing parameters Args: img_short_side (int): Minimum image size for rescaling img_long_side (int): Maximum image size for rescaling mean (tuple): 3-Channel mean for subtraction in preprocessing std (tuple): 3-Channel standard deviation for normalizing in preprocessing Returns: None<|endoftext|>
42989d5a964f3516cb07b51f34149d7338344598057f423b7354bd99a3bc5beb
def initialize_rpn_params():
    """
    User function: Initialize all RPN parameters

    Args:
        None

    Returns:
        None
    """
    # Default anchor/NMS/sampling configuration for the region proposal net.
    system_dict.update({
        'rpn_feat_stride': 16,
        'rpn_anchor_scales': '(8, 16, 32)',
        'rpn_anchor_ratios': '(0.5, 1, 2)',
        'rpn_pre_nms_topk': 12000,
        'rpn_post_nms_topk': 2000,
        'rpn_nms_thresh': 0.7,
        'rpn_min_size': 16,
        'rpn_batch_rois': 256,
        'rpn_allowed_border': 0,
        'rpn_fg_fraction': 0.5,
        'rpn_fg_overlap': 0.7,
        'rpn_bg_overlap': 0.3,
    })
    # Parse the string-encoded tuples into real Python tuples.
    for key in ('rpn_anchor_scales', 'rpn_anchor_ratios'):
        system_dict[key] = ast.literal_eval(system_dict[key])
User function: Initialize all RPN parameters Args: None Returns: None
3_mxrcnn/lib/infer_base.py
initialize_rpn_params
Boltuzamaki/Monk_Object_Detection
549
python
def initialize_rpn_params(): '\n User function: Initialize all RPN parameters\n\n Args:\n None\n\n Returns:\n None\n ' system_dict['rpn_feat_stride'] = 16 system_dict['rpn_anchor_scales'] = '(8, 16, 32)' system_dict['rpn_anchor_ratios'] = '(0.5, 1, 2)' system_dict['rpn_pre_nms_topk'] = 12000 system_dict['rpn_post_nms_topk'] = 2000 system_dict['rpn_nms_thresh'] = 0.7 system_dict['rpn_min_size'] = 16 system_dict['rpn_batch_rois'] = 256 system_dict['rpn_allowed_border'] = 0 system_dict['rpn_fg_fraction'] = 0.5 system_dict['rpn_fg_overlap'] = 0.7 system_dict['rpn_bg_overlap'] = 0.3 system_dict['rpn_anchor_scales'] = ast.literal_eval(system_dict['rpn_anchor_scales']) system_dict['rpn_anchor_ratios'] = ast.literal_eval(system_dict['rpn_anchor_ratios'])
def initialize_rpn_params(): '\n User function: Initialize all RPN parameters\n\n Args:\n None\n\n Returns:\n None\n ' system_dict['rpn_feat_stride'] = 16 system_dict['rpn_anchor_scales'] = '(8, 16, 32)' system_dict['rpn_anchor_ratios'] = '(0.5, 1, 2)' system_dict['rpn_pre_nms_topk'] = 12000 system_dict['rpn_post_nms_topk'] = 2000 system_dict['rpn_nms_thresh'] = 0.7 system_dict['rpn_min_size'] = 16 system_dict['rpn_batch_rois'] = 256 system_dict['rpn_allowed_border'] = 0 system_dict['rpn_fg_fraction'] = 0.5 system_dict['rpn_fg_overlap'] = 0.7 system_dict['rpn_bg_overlap'] = 0.3 system_dict['rpn_anchor_scales'] = ast.literal_eval(system_dict['rpn_anchor_scales']) system_dict['rpn_anchor_ratios'] = ast.literal_eval(system_dict['rpn_anchor_ratios'])<|docstring|>User function: Initialize all RPN parameters Args: None Returns: None<|endoftext|>
0a5e306a56ee2ce48ed7c5cf0889b18f20d499c027cad539420b52891a8adba2
def initialize_rcnn_params():
    """
    User function: Initialize all RCNN parameters

    Args:
        None

    Returns:
        None
    """
    # Default sampling/NMS configuration for the RCNN head.
    system_dict.update({
        'rcnn_batch_rois': 128,
        'rcnn_fg_fraction': 0.25,
        'rcnn_fg_overlap': 0.5,
        'rcnn_bbox_stds': '(0.1, 0.1, 0.2, 0.2)',
        'rcnn_nms_thresh': 0.3,
        'rcnn_conf_thresh': 0.001,
    })
    # Parse the string-encoded tuples (pooled size was set by
    # set_model_params) into real Python tuples.
    for key in ('rcnn_pooled_size', 'rcnn_bbox_stds'):
        system_dict[key] = ast.literal_eval(system_dict[key])
User function: Initialize all RCNN parameters Args: None Returns: None
3_mxrcnn/lib/infer_base.py
initialize_rcnn_params
Boltuzamaki/Monk_Object_Detection
549
python
def initialize_rcnn_params(): '\n User function: Initialize all RCNN parameters\n\n Args:\n None\n\n Returns:\n None\n ' system_dict['rcnn_batch_rois'] = 128 system_dict['rcnn_fg_fraction'] = 0.25 system_dict['rcnn_fg_overlap'] = 0.5 system_dict['rcnn_bbox_stds'] = '(0.1, 0.1, 0.2, 0.2)' system_dict['rcnn_nms_thresh'] = 0.3 system_dict['rcnn_conf_thresh'] = 0.001 system_dict['rcnn_pooled_size'] = ast.literal_eval(system_dict['rcnn_pooled_size']) system_dict['rcnn_bbox_stds'] = ast.literal_eval(system_dict['rcnn_bbox_stds'])
def initialize_rcnn_params(): '\n User function: Initialize all RCNN parameters\n\n Args:\n None\n\n Returns:\n None\n ' system_dict['rcnn_batch_rois'] = 128 system_dict['rcnn_fg_fraction'] = 0.25 system_dict['rcnn_fg_overlap'] = 0.5 system_dict['rcnn_bbox_stds'] = '(0.1, 0.1, 0.2, 0.2)' system_dict['rcnn_nms_thresh'] = 0.3 system_dict['rcnn_conf_thresh'] = 0.001 system_dict['rcnn_pooled_size'] = ast.literal_eval(system_dict['rcnn_pooled_size']) system_dict['rcnn_bbox_stds'] = ast.literal_eval(system_dict['rcnn_bbox_stds'])<|docstring|>User function: Initialize all RCNN parameters Args: None Returns: None<|endoftext|>
7deb286ddf44d9926f8633236a6cae8c1249bd7df322a519a92725006e146108
def set_hyper_params(gpus='0', batch_size=1):
    """
    User function: Set hyper parameters

    Args:
        gpus (string): String mentioning gpu device ID to run the inference on.
        batch_size (int): RCNN batch size.

    Returns:
        None
    """
    # Only the first device in a comma-separated list is used for inference.
    first_gpu, _, _rest = gpus.partition(',')
    system_dict['gpu'] = first_gpu
    system_dict['rcnn_batch_size'] = batch_size
User function: Set hyper parameters Args: gpus (string): String mentioning gpu device ID to run the inference on. Returns: None
3_mxrcnn/lib/infer_base.py
set_hyper_params
Boltuzamaki/Monk_Object_Detection
549
python
def set_hyper_params(gpus='0', batch_size=1): '\n User function: Set hyper parameters\n\n Args:\n gpus (string): String mentioning gpu device ID to run the inference on.\n\n Returns:\n None\n ' system_dict['gpu'] = gpus.split(',')[0] system_dict['rcnn_batch_size'] = batch_size
def set_hyper_params(gpus='0', batch_size=1): '\n User function: Set hyper parameters\n\n Args:\n gpus (string): String mentioning gpu device ID to run the inference on.\n\n Returns:\n None\n ' system_dict['gpu'] = gpus.split(',')[0] system_dict['rcnn_batch_size'] = batch_size<|docstring|>User function: Set hyper parameters Args: gpus (string): String mentioning gpu device ID to run the inference on. Returns: None<|endoftext|>
7d5536bf802b317b4d9bcb0cadc510f7b0a706090af1b49d4b98a7e0356a18dd
def set_output_params(vis_thresh=0.8, vis=False):
    """
    User function: Set output parameters

    Args:
        vis_thresh (float): Threshold for predicted scores. Scores for objects detected below this score will not be displayed
        vis (bool): If True, the output will be displayed.

    Returns:
        None
    """
    system_dict.update(vis_thresh=vis_thresh, vis=vis)
User function: Set output parameters Args: vis_thresh (float): Threshold for predicted scores. Scores for objects detected below this score will not be displayed vis (bool): If True, the output will be displayed. Returns: None
3_mxrcnn/lib/infer_base.py
set_output_params
Boltuzamaki/Monk_Object_Detection
549
python
def set_output_params(vis_thresh=0.8, vis=False): '\n User function: Set output parameters\n\n Args:\n vis_thresh (float): Threshold for predicted scores. Scores for objects detected below this score will not be displayed \n vis (bool): If True, the output will be displayed.\n\n Returns:\n None\n ' system_dict['vis_thresh'] = vis_thresh system_dict['vis'] = vis
def set_output_params(vis_thresh=0.8, vis=False): '\n User function: Set output parameters\n\n Args:\n vis_thresh (float): Threshold for predicted scores. Scores for objects detected below this score will not be displayed \n vis (bool): If True, the output will be displayed.\n\n Returns:\n None\n ' system_dict['vis_thresh'] = vis_thresh system_dict['vis'] = vis<|docstring|>User function: Set output parameters Args: vis_thresh (float): Threshold for predicted scores. Scores for objects detected below this score will not be displayed vis (bool): If True, the output will be displayed. Returns: None<|endoftext|>
3fdd62a3ce763e838639599b62092b5ff82654495b7b9564bcc5a260eaaa970e
def get_vgg16_test(system_dict): '\n Internal function: Select vgg16 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Vgg16 model\n ' from symnet.symbol_vgg import get_vgg_test return get_vgg_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'])
Internal function: Select vgg16 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Vgg16 model
3_mxrcnn/lib/infer_base.py
get_vgg16_test
Boltuzamaki/Monk_Object_Detection
549
python
def get_vgg16_test(system_dict): '\n Internal function: Select vgg16 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Vgg16 model\n ' from symnet.symbol_vgg import get_vgg_test return get_vgg_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'])
def get_vgg16_test(system_dict): '\n Internal function: Select vgg16 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Vgg16 model\n ' from symnet.symbol_vgg import get_vgg_test return get_vgg_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'])<|docstring|>Internal function: Select vgg16 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Vgg16 model<|endoftext|>
f66d6e20b1c907d57b0797bee383e2d5d41b77b72b803632c7fd409f659da882
def get_resnet50_test(system_dict): '\n Internal function: Select resnet50 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet50 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 6, 3), filter_list=(256, 512, 1024, 2048))
Internal function: Select resnet50 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Resnet50 model
3_mxrcnn/lib/infer_base.py
get_resnet50_test
Boltuzamaki/Monk_Object_Detection
549
python
def get_resnet50_test(system_dict): '\n Internal function: Select resnet50 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet50 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 6, 3), filter_list=(256, 512, 1024, 2048))
def get_resnet50_test(system_dict): '\n Internal function: Select resnet50 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet50 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 6, 3), filter_list=(256, 512, 1024, 2048))<|docstring|>Internal function: Select resnet50 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Resnet50 model<|endoftext|>
a9051bab8cfcbabbfc468127552cd4cbb987a87178124b7b237a7ebca32d9b34
def get_resnet101_test(system_dict): '\n Internal function: Select resnet101 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet101 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 23, 3), filter_list=(256, 512, 1024, 2048))
Internal function: Select resnet101 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Resnet101 model
3_mxrcnn/lib/infer_base.py
get_resnet101_test
Boltuzamaki/Monk_Object_Detection
549
python
def get_resnet101_test(system_dict): '\n Internal function: Select resnet101 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet101 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 23, 3), filter_list=(256, 512, 1024, 2048))
def get_resnet101_test(system_dict): '\n Internal function: Select resnet101 params\n\n Args:\n system_dict (dict): Dictionary of all the parameters selected for training\n\n Returns:\n mxnet model: Resnet101 model\n ' from symnet.symbol_resnet import get_resnet_test return get_resnet_test(anchor_scales=system_dict['rpn_anchor_scales'], anchor_ratios=system_dict['rpn_anchor_ratios'], rpn_feature_stride=system_dict['rpn_feat_stride'], rpn_pre_topk=system_dict['rpn_pre_nms_topk'], rpn_post_topk=system_dict['rpn_post_nms_topk'], rpn_nms_thresh=system_dict['rpn_nms_thresh'], rpn_min_size=system_dict['rpn_min_size'], num_classes=system_dict['rcnn_num_classes'], rcnn_feature_stride=system_dict['rcnn_feat_stride'], rcnn_pooled_size=system_dict['rcnn_pooled_size'], rcnn_batch_size=system_dict['rcnn_batch_size'], units=(3, 4, 23, 3), filter_list=(256, 512, 1024, 2048))<|docstring|>Internal function: Select resnet101 params Args: system_dict (dict): Dictionary of all the parameters selected for training Returns: mxnet model: Resnet101 model<|endoftext|>
1a9e7349666bebcfdde76ed72a3cf2caeecd4da8b76102c076dea75bb313ab08
def set_network(): '\n User function: Set the train model\n\n Args:\n None\n\n Returns:\n mxnet model: Model as per selected params\n ' network = system_dict['network'] networks = {'vgg16': get_vgg16_test, 'resnet50': get_resnet50_test, 'resnet101': get_resnet101_test} if (network not in networks): raise ValueError('network {} not supported'.format(network)) return networks[network](system_dict)
User function: Set the train model Args: None Returns: mxnet model: Model as per selected params
3_mxrcnn/lib/infer_base.py
set_network
Boltuzamaki/Monk_Object_Detection
549
python
def set_network(): '\n User function: Set the train model\n\n Args:\n None\n\n Returns:\n mxnet model: Model as per selected params\n ' network = system_dict['network'] networks = {'vgg16': get_vgg16_test, 'resnet50': get_resnet50_test, 'resnet101': get_resnet101_test} if (network not in networks): raise ValueError('network {} not supported'.format(network)) return networks[network](system_dict)
def set_network(): '\n User function: Set the train model\n\n Args:\n None\n\n Returns:\n mxnet model: Model as per selected params\n ' network = system_dict['network'] networks = {'vgg16': get_vgg16_test, 'resnet50': get_resnet50_test, 'resnet101': get_resnet101_test} if (network not in networks): raise ValueError('network {} not supported'.format(network)) return networks[network](system_dict)<|docstring|>User function: Set the train model Args: None Returns: mxnet model: Model as per selected params<|endoftext|>
f34381c086cba5aafc4f9beb5322c099c6441fae8829a0f7ee0def37214d54f0
def load_model(sym): '\n User function: Loads the trained model weights \n\n Args:\n sym (mxnet model): Mxnet model returned from set_network() function\n\n Returns:\n mxnet model: Model with trained weights\n ' if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (arg_params, aux_params) = load_param(system_dict['params'], ctx=ctx) data_names = ['data', 'im_info'] label_names = None data_shapes = [('data', (1, 3, system_dict['img_long_side'], system_dict['img_long_side'])), ('im_info', (1, 3))] label_shapes = None check_shape(sym, data_shapes, arg_params, aux_params) mod = Module(sym, data_names, label_names, context=ctx) mod.bind(data_shapes, label_shapes, for_training=False) mod.init_params(arg_params=arg_params, aux_params=aux_params) return mod
User function: Loads the trained model weights Args: sym (mxnet model): Mxnet model returned from set_network() function Returns: mxnet model: Model with trained weights
3_mxrcnn/lib/infer_base.py
load_model
Boltuzamaki/Monk_Object_Detection
549
python
def load_model(sym): '\n User function: Loads the trained model weights \n\n Args:\n sym (mxnet model): Mxnet model returned from set_network() function\n\n Returns:\n mxnet model: Model with trained weights\n ' if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (arg_params, aux_params) = load_param(system_dict['params'], ctx=ctx) data_names = ['data', 'im_info'] label_names = None data_shapes = [('data', (1, 3, system_dict['img_long_side'], system_dict['img_long_side'])), ('im_info', (1, 3))] label_shapes = None check_shape(sym, data_shapes, arg_params, aux_params) mod = Module(sym, data_names, label_names, context=ctx) mod.bind(data_shapes, label_shapes, for_training=False) mod.init_params(arg_params=arg_params, aux_params=aux_params) return mod
def load_model(sym): '\n User function: Loads the trained model weights \n\n Args:\n sym (mxnet model): Mxnet model returned from set_network() function\n\n Returns:\n mxnet model: Model with trained weights\n ' if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (arg_params, aux_params) = load_param(system_dict['params'], ctx=ctx) data_names = ['data', 'im_info'] label_names = None data_shapes = [('data', (1, 3, system_dict['img_long_side'], system_dict['img_long_side'])), ('im_info', (1, 3))] label_shapes = None check_shape(sym, data_shapes, arg_params, aux_params) mod = Module(sym, data_names, label_names, context=ctx) mod.bind(data_shapes, label_shapes, for_training=False) mod.init_params(arg_params=arg_params, aux_params=aux_params) return mod<|docstring|>User function: Loads the trained model weights Args: sym (mxnet model): Mxnet model returned from set_network() function Returns: mxnet model: Model with trained weights<|endoftext|>
ed342ac24aea6fa2ba354f1a8c70fffaae4b1bd7fe636da0031d27fc611363f4
def Infer(img_name, mod): '\n User function: Run inference on image and visualize it\n\n Args:\n img_name (str): Relative path to the image file\n mod (mxnet model): Mxnet model returned from load_model() function\n\n Returns:\n list: Contaning IDs, Scores and bounding box locations of predicted objects. \n ' system_dict['image'] = img_name if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (im_tensor, im_info, im_orig) = load_test(system_dict['image'], short=system_dict['img_short_side'], max_size=system_dict['img_long_side'], mean=system_dict['img_pixel_means'], std=system_dict['img_pixel_stds']) data_batch = generate_batch(im_tensor, im_info) mod.forward(data_batch) (rois, scores, bbox_deltas) = mod.get_outputs() rois = rois[(:, 1:)] scores = scores[0] bbox_deltas = bbox_deltas[0] im_info = im_info[0] det = im_detect(rois, scores, bbox_deltas, im_info, bbox_stds=system_dict['rcnn_bbox_stds'], nms_thresh=system_dict['rcnn_nms_thresh'], conf_thresh=system_dict['rcnn_conf_thresh']) output = [] conf_scores = [] for [cls, conf, x1, y1, x2, y2] in det: output.append([system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]]) conf_scores.append(conf) if ((cls > 0) and (conf > system_dict['vis_thresh'])): print(system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]) max_index = conf_scores.index(max(conf_scores)) print(output[max_index]) if system_dict['vis']: vis_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) save_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) return output
User function: Run inference on image and visualize it Args: img_name (str): Relative path to the image file mod (mxnet model): Mxnet model returned from load_model() function Returns: list: Contaning IDs, Scores and bounding box locations of predicted objects.
3_mxrcnn/lib/infer_base.py
Infer
Boltuzamaki/Monk_Object_Detection
549
python
def Infer(img_name, mod): '\n User function: Run inference on image and visualize it\n\n Args:\n img_name (str): Relative path to the image file\n mod (mxnet model): Mxnet model returned from load_model() function\n\n Returns:\n list: Contaning IDs, Scores and bounding box locations of predicted objects. \n ' system_dict['image'] = img_name if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (im_tensor, im_info, im_orig) = load_test(system_dict['image'], short=system_dict['img_short_side'], max_size=system_dict['img_long_side'], mean=system_dict['img_pixel_means'], std=system_dict['img_pixel_stds']) data_batch = generate_batch(im_tensor, im_info) mod.forward(data_batch) (rois, scores, bbox_deltas) = mod.get_outputs() rois = rois[(:, 1:)] scores = scores[0] bbox_deltas = bbox_deltas[0] im_info = im_info[0] det = im_detect(rois, scores, bbox_deltas, im_info, bbox_stds=system_dict['rcnn_bbox_stds'], nms_thresh=system_dict['rcnn_nms_thresh'], conf_thresh=system_dict['rcnn_conf_thresh']) output = [] conf_scores = [] for [cls, conf, x1, y1, x2, y2] in det: output.append([system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]]) conf_scores.append(conf) if ((cls > 0) and (conf > system_dict['vis_thresh'])): print(system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]) max_index = conf_scores.index(max(conf_scores)) print(output[max_index]) if system_dict['vis']: vis_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) save_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) return output
def Infer(img_name, mod): '\n User function: Run inference on image and visualize it\n\n Args:\n img_name (str): Relative path to the image file\n mod (mxnet model): Mxnet model returned from load_model() function\n\n Returns:\n list: Contaning IDs, Scores and bounding box locations of predicted objects. \n ' system_dict['image'] = img_name if system_dict['gpu']: ctx = mx.gpu(int(system_dict['gpu'])) else: ctx = mx.cpu(0) (im_tensor, im_info, im_orig) = load_test(system_dict['image'], short=system_dict['img_short_side'], max_size=system_dict['img_long_side'], mean=system_dict['img_pixel_means'], std=system_dict['img_pixel_stds']) data_batch = generate_batch(im_tensor, im_info) mod.forward(data_batch) (rois, scores, bbox_deltas) = mod.get_outputs() rois = rois[(:, 1:)] scores = scores[0] bbox_deltas = bbox_deltas[0] im_info = im_info[0] det = im_detect(rois, scores, bbox_deltas, im_info, bbox_stds=system_dict['rcnn_bbox_stds'], nms_thresh=system_dict['rcnn_nms_thresh'], conf_thresh=system_dict['rcnn_conf_thresh']) output = [] conf_scores = [] for [cls, conf, x1, y1, x2, y2] in det: output.append([system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]]) conf_scores.append(conf) if ((cls > 0) and (conf > system_dict['vis_thresh'])): print(system_dict['classes'][int(cls)], conf, [x1, y1, x2, y2]) max_index = conf_scores.index(max(conf_scores)) print(output[max_index]) if system_dict['vis']: vis_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) save_detection(im_orig, det, system_dict['classes'], thresh=system_dict['vis_thresh']) return output<|docstring|>User function: Run inference on image and visualize it Args: img_name (str): Relative path to the image file mod (mxnet model): Mxnet model returned from load_model() function Returns: list: Contaning IDs, Scores and bounding box locations of predicted objects.<|endoftext|>
d623f79a0df972d5987d394e4c07a97887a083cf874c87bd51d593dc3618fd4e
def __init__(self, simulator, body_id, to_position, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_position (np.array[float[3]]): position where the ray should stop with respect to the new local link\n frame (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RaySensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_position, (tuple, list)): to_position = np.asarray(to_position) if (not isinstance(to_position, np.ndarray)): raise TypeError("Expecting the given 'to_position' to be a np.array, but got instead: {}".format(type(to_position))) if (to_position.shape != (3,)): raise ValueError("Expecting the shape of the given 'to_position' to be (3,), but got instead a shape of: {}".format(to_position.shape)) self.to_position = to_position
Initialize the Ray sensor. Args: simulator (Simulator): simulator instance. body_id (int): unique body id. to_position (np.array[float[3]]): position where the ray should stop with respect to the new local link frame (specified by :attr:`position` and :attr:`orientation`). link_id (int): unique id of the link. noise (None, Noise): noise to be added. ticks (int): number of steps to wait/sleep before acquisition of the next sensor value. latency (int, float, None): latency time / step. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].
pyrobolearn/robots/sensors/ray.py
__init__
benjaminalt/pyrobolearn
2
python
def __init__(self, simulator, body_id, to_position, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_position (np.array[float[3]]): position where the ray should stop with respect to the new local link\n frame (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RaySensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_position, (tuple, list)): to_position = np.asarray(to_position) if (not isinstance(to_position, np.ndarray)): raise TypeError("Expecting the given 'to_position' to be a np.array, but got instead: {}".format(type(to_position))) if (to_position.shape != (3,)): raise ValueError("Expecting the shape of the given 'to_position' to be (3,), but got instead a shape of: {}".format(to_position.shape)) self.to_position = to_position
def __init__(self, simulator, body_id, to_position, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_position (np.array[float[3]]): position where the ray should stop with respect to the new local link\n frame (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RaySensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_position, (tuple, list)): to_position = np.asarray(to_position) if (not isinstance(to_position, np.ndarray)): raise TypeError("Expecting the given 'to_position' to be a np.array, but got instead: {}".format(type(to_position))) if (to_position.shape != (3,)): raise ValueError("Expecting the shape of the given 'to_position' to be (3,), but got instead a shape of: {}".format(to_position.shape)) self.to_position = to_position<|docstring|>Initialize the Ray sensor. Args: simulator (Simulator): simulator instance. body_id (int): unique body id. to_position (np.array[float[3]]): position where the ray should stop with respect to the new local link frame (specified by :attr:`position` and :attr:`orientation`). link_id (int): unique id of the link. noise (None, Noise): noise to be added. 
ticks (int): number of steps to wait/sleep before acquisition of the next sensor value. latency (int, float, None): latency time / step. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].<|endoftext|>
0abbb41b8b386dc5fea65ac5d1d5cb877924763e9b620242b7a18d4c530fb643
def _sense(self, apply_noise=True): '\n Sense using the ray sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n float: hit fraction along the ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray'): return self.simulator.get_sensor('ray', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_position)) hit = self.sim.ray_test(from_position=self.position, to_position=position)[2] if apply_noise: hit = self._noise(hit) return hit
Sense using the ray sensor. Args: apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise. Returns: float: hit fraction along the ray in range [0,1] along the ray.
pyrobolearn/robots/sensors/ray.py
_sense
benjaminalt/pyrobolearn
2
python
def _sense(self, apply_noise=True): '\n Sense using the ray sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n float: hit fraction along the ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray'): return self.simulator.get_sensor('ray', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_position)) hit = self.sim.ray_test(from_position=self.position, to_position=position)[2] if apply_noise: hit = self._noise(hit) return hit
def _sense(self, apply_noise=True): '\n Sense using the ray sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n float: hit fraction along the ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray'): return self.simulator.get_sensor('ray', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_position)) hit = self.sim.ray_test(from_position=self.position, to_position=position)[2] if apply_noise: hit = self._noise(hit) return hit<|docstring|>Sense using the ray sensor. Args: apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise. Returns: float: hit fraction along the ray in range [0,1] along the ray.<|endoftext|>
de2ed535fac54e171f6bc2d2b76bd1bbf8c3c0dbcca063bc3f717aa5cf4fb8f7
def render(self, enable=True, color=None): 'Render the ray in the simulator; they are only visual and attached to the sensor (link). The visual shape\n is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
Render the ray in the simulator; they are only visual and attached to the sensor (link). The visual shape is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.
pyrobolearn/robots/sensors/ray.py
render
benjaminalt/pyrobolearn
2
python
def render(self, enable=True, color=None): 'Render the ray in the simulator; they are only visual and attached to the sensor (link). The visual shape\n is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
def render(self, enable=True, color=None): 'Render the ray in the simulator; they are only visual and attached to the sensor (link). The visual shape\n is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass<|docstring|>Render the ray in the simulator; they are only visual and attached to the sensor (link). The visual shape is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.<|endoftext|>
a73ecf58c39d782cc99ec580b1f7d5aca30ec0402594161b6d8f8fc08957af68
def __init__(self, simulator, body_id, to_positions, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray batch sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_positions (np.array[N,3]): position where each ray should stop with respect to the new local link frame\n (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RayBatchSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_positions, (tuple, list)): to_positions = np.asarray(to_positions) if (not isinstance(to_positions, np.ndarray)): raise TypeError("Expecting the given 'to_positions' to be a np.array, but got instead: {}".format(type(to_positions))) if (to_positions.ndim != 2): raise ValueError("Expecting the given 'to_positions' to be 2D array, but got instead a {}D array".format(to_positions.ndim)) if (to_positions.shape[1] != 3): raise ValueError("Expecting the shape of the given 'to_positions' to be (N,3), but got instead a shape of: {}".format(to_positions.shape)) if (len(to_positions) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError("The number of 'to_positions' (={}) is bigger than the maximum amount authorized (={})".format(len(to_positions), self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) self.to_positions = 
to_positions
Initialize the Ray batch sensor. Args: simulator (Simulator): simulator instance. body_id (int): unique body id. to_positions (np.array[N,3]): position where each ray should stop with respect to the new local link frame (specified by :attr:`position` and :attr:`orientation`). link_id (int): unique id of the link. noise (None, Noise): noise to be added. ticks (int): number of steps to wait/sleep before acquisition of the next sensor value. latency (int, float, None): latency time / step. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].
pyrobolearn/robots/sensors/ray.py
__init__
benjaminalt/pyrobolearn
2
python
def __init__(self, simulator, body_id, to_positions, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray batch sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_positions (np.array[N,3]): position where each ray should stop with respect to the new local link frame\n (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RayBatchSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_positions, (tuple, list)): to_positions = np.asarray(to_positions) if (not isinstance(to_positions, np.ndarray)): raise TypeError("Expecting the given 'to_positions' to be a np.array, but got instead: {}".format(type(to_positions))) if (to_positions.ndim != 2): raise ValueError("Expecting the given 'to_positions' to be 2D array, but got instead a {}D array".format(to_positions.ndim)) if (to_positions.shape[1] != 3): raise ValueError("Expecting the shape of the given 'to_positions' to be (N,3), but got instead a shape of: {}".format(to_positions.shape)) if (len(to_positions) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError("The number of 'to_positions' (={}) is bigger than the maximum amount authorized (={})".format(len(to_positions), self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) self.to_positions = 
to_positions
def __init__(self, simulator, body_id, to_positions, link_id=(- 1), noise=None, ticks=1, latency=None, position=None, orientation=None): '\n Initialize the Ray batch sensor.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique body id.\n to_positions (np.array[N,3]): position where each ray should stop with respect to the new local link frame\n (specified by :attr:`position` and :attr:`orientation`).\n link_id (int): unique id of the link.\n noise (None, Noise): noise to be added.\n ticks (int): number of steps to wait/sleep before acquisition of the next sensor value.\n latency (int, float, None): latency time / step.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].\n ' super(RayBatchSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, noise=noise, ticks=ticks, latency=latency, position=position, orientation=orientation) if isinstance(to_positions, (tuple, list)): to_positions = np.asarray(to_positions) if (not isinstance(to_positions, np.ndarray)): raise TypeError("Expecting the given 'to_positions' to be a np.array, but got instead: {}".format(type(to_positions))) if (to_positions.ndim != 2): raise ValueError("Expecting the given 'to_positions' to be 2D array, but got instead a {}D array".format(to_positions.ndim)) if (to_positions.shape[1] != 3): raise ValueError("Expecting the shape of the given 'to_positions' to be (N,3), but got instead a shape of: {}".format(to_positions.shape)) if (len(to_positions) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError("The number of 'to_positions' (={}) is bigger than the maximum amount authorized (={})".format(len(to_positions), self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) self.to_positions = 
to_positions<|docstring|>Initialize the Ray batch sensor. Args: simulator (Simulator): simulator instance. body_id (int): unique body id. to_positions (np.array[N,3]): position where each ray should stop with respect to the new local link frame (specified by :attr:`position` and :attr:`orientation`). link_id (int): unique id of the link. noise (None, Noise): noise to be added. ticks (int): number of steps to wait/sleep before acquisition of the next sensor value. latency (int, float, None): latency time / step. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].<|endoftext|>
5ca0a996ab02c8c323148968a65fbcddcdf44644d09f22439f29b31e5e0b4a18
def _sense(self, apply_noise=True): '\n Sense using the ray batch sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n np.array[float[N]]: hit fractions along each ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray_batch'): return self.simulator.get_sensor('ray_batch', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_positions)) rays = self.sim.ray_test_batch(from_positions=self.position, to_positions=position) hit = np.array([ray[2] for ray in rays]) if apply_noise: hit = self._noise(hit) return hit
Sense using the ray batch sensor. Args: apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise. Returns: np.array[float[N]]: hit fractions along each ray in range [0,1] along the ray.
pyrobolearn/robots/sensors/ray.py
_sense
benjaminalt/pyrobolearn
2
python
def _sense(self, apply_noise=True): '\n Sense using the ray batch sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n np.array[float[N]]: hit fractions along each ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray_batch'): return self.simulator.get_sensor('ray_batch', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_positions)) rays = self.sim.ray_test_batch(from_positions=self.position, to_positions=position) hit = np.array([ray[2] for ray in rays]) if apply_noise: hit = self._noise(hit) return hit
def _sense(self, apply_noise=True): '\n Sense using the ray batch sensor.\n\n Args:\n apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise.\n\n Returns:\n np.array[float[N]]: hit fractions along each ray in range [0,1] along the ray.\n ' if self.simulator.supports_sensors('ray_batch'): return self.simulator.get_sensor('ray_batch', self.body_id, self.link_id).sense() position = (self.position + get_rotated_point_from_quaternion(self.orientation, self.to_positions)) rays = self.sim.ray_test_batch(from_positions=self.position, to_positions=position) hit = np.array([ray[2] for ray in rays]) if apply_noise: hit = self._noise(hit) return hit<|docstring|>Sense using the ray batch sensor. Args: apply_noise (bool): if we should apply the noise or not. Note that the sensor might already have some noise. Returns: np.array[float[N]]: hit fractions along each ray in range [0,1] along the ray.<|endoftext|>
913f8905ffd6625451c3907e635830c3490771effe94cc4bce58d0c0e61f19ed
def render(self, enable=True, color=None): 'Render the batch of rays in the simulator; they are only visual and attached to the sensor (link). The\n visual shape of each ray is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
Render the batch of rays in the simulator; they are only visual and attached to the sensor (link). The visual shape of each ray is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.
pyrobolearn/robots/sensors/ray.py
render
benjaminalt/pyrobolearn
2
python
def render(self, enable=True, color=None): 'Render the batch of rays in the simulator; they are only visual and attached to the sensor (link). The\n visual shape of each ray is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
def render(self, enable=True, color=None): 'Render the batch of rays in the simulator; they are only visual and attached to the sensor (link). The\n visual shape of each ray is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass<|docstring|>Render the batch of rays in the simulator; they are only visual and attached to the sensor (link). The visual shape of each ray is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list[float[4]], np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.<|endoftext|>
cb47c2bab1c157d8e1823ac0d8e64449d59c51580fc757dd326dd356b583a26e
def __init__(self, simulator, body_id, link_id, width, height, num_rays_width=2, num_rays_height=2, max_ray_length=100, position=None, orientation=None): "\n Initialize the heightmap sensor. This is only valid in the simulator.\n\n Note that `num_rays_width * num_rays_height` has to be smaller than `simulator.MAX_RAY_INTERSECTION_BATCH_SIZE`.\n In pybullet, this is currently set to 16,384.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique id of the body\n link_id (int): unique id of the link\n width (float): width of the map (along the left-right axis (i.e. y axis) of the body, measured in meters)\n height (float): height of the map (along the front-back axis (i.e. x axis) of the body, measured in meters)\n num_rays_width (int): number of rays along the width dimension (left-right axis). This will be the 'width'\n of the returned heightmap. This must be bigger or equal to 2.\n num_rays_height (int): number of rays along the height dimension (front-back axis). This will be\n the 'height' of the returned heightmap. This must be bigger or equal to 2.\n max_ray_length (float): maximum length of each ray.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector. This position represents the center of the map.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). 
If None, it will be the unit quaternion [0,0,0,1].\n " super(HeightmapSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, position=position, orientation=orientation) (num_rays_width, num_rays_height) = (int(num_rays_width), int(num_rays_height)) if ((num_rays_width * num_rays_height) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError('num_rays_width * num_rays_height can not be bigger than {}'.format(self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) if (num_rays_width < 2): raise ValueError('num_rays_width must be equal or bigger than 2, but got: {}'.format(num_rays_width)) if (num_rays_height < 2): raise ValueError('num_rays_height must be equal or bigger than 2, but got: {}'.format(num_rays_height)) self._num_rays_width = num_rays_width self._num_rays_height = num_rays_height if (not isinstance(max_ray_length, (float, int))): raise TypeError("Expecting 'max_ray_length' to be an int or float, but got instead: {}".format(type(max_ray_length))) max_ray_length = float(max_ray_length) if (max_ray_length <= 0.0): raise ValueError("Expecting 'max_ray_length' to be positive, but got instead: {}".format(max_ray_length)) self._max_ray_length = max_ray_length (width, height) = (float(width), float(height)) if (width <= 0.0): raise ValueError("Expecting the 'width' to be bigger than 0, but got: {}".format(width)) if (height <= 0.0): raise ValueError("Expecting the 'height' to be bigger than 0, but got: {}".format(height)) self._width = width self._height = height self._z_array = np.ones((self._num_rays_width * self._num_rays_height))
Initialize the heightmap sensor. This is only valid in the simulator. Note that `num_rays_width * num_rays_height` has to be smaller than `simulator.MAX_RAY_INTERSECTION_BATCH_SIZE`. In pybullet, this is currently set to 16,384. Args: simulator (Simulator): simulator instance. body_id (int): unique id of the body link_id (int): unique id of the link width (float): width of the map (along the left-right axis (i.e. y axis) of the body, measured in meters) height (float): height of the map (along the front-back axis (i.e. x axis) of the body, measured in meters) num_rays_width (int): number of rays along the width dimension (left-right axis). This will be the 'width' of the returned heightmap. This must be bigger or equal to 2. num_rays_height (int): number of rays along the height dimension (front-back axis). This will be the 'height' of the returned heightmap. This must be bigger or equal to 2. max_ray_length (float): maximum length of each ray. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. This position represents the center of the map. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].
pyrobolearn/robots/sensors/ray.py
__init__
benjaminalt/pyrobolearn
2
python
def __init__(self, simulator, body_id, link_id, width, height, num_rays_width=2, num_rays_height=2, max_ray_length=100, position=None, orientation=None): "\n Initialize the heightmap sensor. This is only valid in the simulator.\n\n Note that `num_rays_width * num_rays_height` has to be smaller than `simulator.MAX_RAY_INTERSECTION_BATCH_SIZE`.\n In pybullet, this is currently set to 16,384.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique id of the body\n link_id (int): unique id of the link\n width (float): width of the map (along the left-right axis (i.e. y axis) of the body, measured in meters)\n height (float): height of the map (along the front-back axis (i.e. x axis) of the body, measured in meters)\n num_rays_width (int): number of rays along the width dimension (left-right axis). This will be the 'width'\n of the returned heightmap. This must be bigger or equal to 2.\n num_rays_height (int): number of rays along the height dimension (front-back axis). This will be\n the 'height' of the returned heightmap. This must be bigger or equal to 2.\n max_ray_length (float): maximum length of each ray.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector. This position represents the center of the map.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). 
If None, it will be the unit quaternion [0,0,0,1].\n " super(HeightmapSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, position=position, orientation=orientation) (num_rays_width, num_rays_height) = (int(num_rays_width), int(num_rays_height)) if ((num_rays_width * num_rays_height) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError('num_rays_width * num_rays_height can not be bigger than {}'.format(self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) if (num_rays_width < 2): raise ValueError('num_rays_width must be equal or bigger than 2, but got: {}'.format(num_rays_width)) if (num_rays_height < 2): raise ValueError('num_rays_height must be equal or bigger than 2, but got: {}'.format(num_rays_height)) self._num_rays_width = num_rays_width self._num_rays_height = num_rays_height if (not isinstance(max_ray_length, (float, int))): raise TypeError("Expecting 'max_ray_length' to be an int or float, but got instead: {}".format(type(max_ray_length))) max_ray_length = float(max_ray_length) if (max_ray_length <= 0.0): raise ValueError("Expecting 'max_ray_length' to be positive, but got instead: {}".format(max_ray_length)) self._max_ray_length = max_ray_length (width, height) = (float(width), float(height)) if (width <= 0.0): raise ValueError("Expecting the 'width' to be bigger than 0, but got: {}".format(width)) if (height <= 0.0): raise ValueError("Expecting the 'height' to be bigger than 0, but got: {}".format(height)) self._width = width self._height = height self._z_array = np.ones((self._num_rays_width * self._num_rays_height))
def __init__(self, simulator, body_id, link_id, width, height, num_rays_width=2, num_rays_height=2, max_ray_length=100, position=None, orientation=None): "\n Initialize the heightmap sensor. This is only valid in the simulator.\n\n Note that `num_rays_width * num_rays_height` has to be smaller than `simulator.MAX_RAY_INTERSECTION_BATCH_SIZE`.\n In pybullet, this is currently set to 16,384.\n\n Args:\n simulator (Simulator): simulator instance.\n body_id (int): unique id of the body\n link_id (int): unique id of the link\n width (float): width of the map (along the left-right axis (i.e. y axis) of the body, measured in meters)\n height (float): height of the map (along the front-back axis (i.e. x axis) of the body, measured in meters)\n num_rays_width (int): number of rays along the width dimension (left-right axis). This will be the 'width'\n of the returned heightmap. This must be bigger or equal to 2.\n num_rays_height (int): number of rays along the height dimension (front-back axis). This will be\n the 'height' of the returned heightmap. This must be bigger or equal to 2.\n max_ray_length (float): maximum length of each ray.\n position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None,\n it will be the zero vector. This position represents the center of the map.\n orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link\n (expressed as a quaternion [x,y,z,w]). 
If None, it will be the unit quaternion [0,0,0,1].\n " super(HeightmapSensor, self).__init__(simulator, body_id=body_id, link_id=link_id, position=position, orientation=orientation) (num_rays_width, num_rays_height) = (int(num_rays_width), int(num_rays_height)) if ((num_rays_width * num_rays_height) > self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE): raise ValueError('num_rays_width * num_rays_height can not be bigger than {}'.format(self.sim.MAX_RAY_INTERSECTION_BATCH_SIZE)) if (num_rays_width < 2): raise ValueError('num_rays_width must be equal or bigger than 2, but got: {}'.format(num_rays_width)) if (num_rays_height < 2): raise ValueError('num_rays_height must be equal or bigger than 2, but got: {}'.format(num_rays_height)) self._num_rays_width = num_rays_width self._num_rays_height = num_rays_height if (not isinstance(max_ray_length, (float, int))): raise TypeError("Expecting 'max_ray_length' to be an int or float, but got instead: {}".format(type(max_ray_length))) max_ray_length = float(max_ray_length) if (max_ray_length <= 0.0): raise ValueError("Expecting 'max_ray_length' to be positive, but got instead: {}".format(max_ray_length)) self._max_ray_length = max_ray_length (width, height) = (float(width), float(height)) if (width <= 0.0): raise ValueError("Expecting the 'width' to be bigger than 0, but got: {}".format(width)) if (height <= 0.0): raise ValueError("Expecting the 'height' to be bigger than 0, but got: {}".format(height)) self._width = width self._height = height self._z_array = np.ones((self._num_rays_width * self._num_rays_height))<|docstring|>Initialize the heightmap sensor. This is only valid in the simulator. Note that `num_rays_width * num_rays_height` has to be smaller than `simulator.MAX_RAY_INTERSECTION_BATCH_SIZE`. In pybullet, this is currently set to 16,384. Args: simulator (Simulator): simulator instance. body_id (int): unique id of the body link_id (int): unique id of the link width (float): width of the map (along the left-right axis (i.e. 
y axis) of the body, measured in meters) height (float): height of the map (along the front-back axis (i.e. x axis) of the body, measured in meters) num_rays_width (int): number of rays along the width dimension (left-right axis). This will be the 'width' of the returned heightmap. This must be bigger or equal to 2. num_rays_height (int): number of rays along the height dimension (front-back axis). This will be the 'height' of the returned heightmap. This must be bigger or equal to 2. max_ray_length (float): maximum length of each ray. position (np.array[float[3]], None): local position of the sensor with respect to the given link. If None, it will be the zero vector. This position represents the center of the map. orientation (np.array[float[4]], None): local orientation of the sensor with respect to the given link (expressed as a quaternion [x,y,z,w]). If None, it will be the unit quaternion [0,0,0,1].<|endoftext|>
118a3bade9d45dae163424d95ef905dac99ae84b67a897d1979d5fb7d02dda4f
def get_ray_from_to_positions(self): '\n Return the world positions for the rays to start and end.\n\n Returns:\n np.array[float[N,3]]: list of starting positions for the rays\n np.array[float[N,3]]: list of ending positions for the rays\n ' pos = self.position (w2, h2) = ((self._width / 2.0), (self._height / 2.0)) (x, y) = np.meshgrid(np.linspace((pos[1] - w2), (pos[1] + w2), self._num_rays_width), np.linspace((pos[0] - h2), (pos[0] + h2), self._num_rays_height)) (x, y) = (x.ravel(), y.ravel()) from_z = (pos[2] * self._z_array) to_z = (from_z - self._max_ray_length) from_positions = np.vstack((x, y, from_z)).T to_positions = np.vstack((x, y, to_z)).T return (from_positions, to_positions)
Return the world positions for the rays to start and end. Returns: np.array[float[N,3]]: list of starting positions for the rays np.array[float[N,3]]: list of ending positions for the rays
pyrobolearn/robots/sensors/ray.py
get_ray_from_to_positions
benjaminalt/pyrobolearn
2
python
def get_ray_from_to_positions(self): '\n Return the world positions for the rays to start and end.\n\n Returns:\n np.array[float[N,3]]: list of starting positions for the rays\n np.array[float[N,3]]: list of ending positions for the rays\n ' pos = self.position (w2, h2) = ((self._width / 2.0), (self._height / 2.0)) (x, y) = np.meshgrid(np.linspace((pos[1] - w2), (pos[1] + w2), self._num_rays_width), np.linspace((pos[0] - h2), (pos[0] + h2), self._num_rays_height)) (x, y) = (x.ravel(), y.ravel()) from_z = (pos[2] * self._z_array) to_z = (from_z - self._max_ray_length) from_positions = np.vstack((x, y, from_z)).T to_positions = np.vstack((x, y, to_z)).T return (from_positions, to_positions)
def get_ray_from_to_positions(self): '\n Return the world positions for the rays to start and end.\n\n Returns:\n np.array[float[N,3]]: list of starting positions for the rays\n np.array[float[N,3]]: list of ending positions for the rays\n ' pos = self.position (w2, h2) = ((self._width / 2.0), (self._height / 2.0)) (x, y) = np.meshgrid(np.linspace((pos[1] - w2), (pos[1] + w2), self._num_rays_width), np.linspace((pos[0] - h2), (pos[0] + h2), self._num_rays_height)) (x, y) = (x.ravel(), y.ravel()) from_z = (pos[2] * self._z_array) to_z = (from_z - self._max_ray_length) from_positions = np.vstack((x, y, from_z)).T to_positions = np.vstack((x, y, to_z)).T return (from_positions, to_positions)<|docstring|>Return the world positions for the rays to start and end. Returns: np.array[float[N,3]]: list of starting positions for the rays np.array[float[N,3]]: list of ending positions for the rays<|endoftext|>
a4dcdfa0a628a2913f3422cf9bea488123a6515c5edfd4c01bfdd076daf06e31
def _sense(self, apply_noise=True): '\n Return the heightmap.\n\n Returns:\n np.array[float[width, height]]: Height map with shape [width, height] where the values are the hit\n fractions [0,1], you can multiply it by :attr:`max_ray_length` to get the depth in meters.\n ' (from_positions, to_positions) = self.get_ray_from_to_positions() rays = self.sim.ray_test_batch(from_positions=from_positions, to_positions=to_positions) hit = np.array([ray[2] for ray in rays]).reshape(self._num_rays_width, self._num_rays_height) if apply_noise: hit = self._noise(hit) return hit
Return the heightmap. Returns: np.array[float[width, height]]: Height map with shape [width, height] where the values are the hit fractions [0,1], you can multiply it by :attr:`max_ray_length` to get the depth in meters.
pyrobolearn/robots/sensors/ray.py
_sense
benjaminalt/pyrobolearn
2
python
def _sense(self, apply_noise=True): '\n Return the heightmap.\n\n Returns:\n np.array[float[width, height]]: Height map with shape [width, height] where the values are the hit\n fractions [0,1], you can multiply it by :attr:`max_ray_length` to get the depth in meters.\n ' (from_positions, to_positions) = self.get_ray_from_to_positions() rays = self.sim.ray_test_batch(from_positions=from_positions, to_positions=to_positions) hit = np.array([ray[2] for ray in rays]).reshape(self._num_rays_width, self._num_rays_height) if apply_noise: hit = self._noise(hit) return hit
def _sense(self, apply_noise=True): '\n Return the heightmap.\n\n Returns:\n np.array[float[width, height]]: Height map with shape [width, height] where the values are the hit\n fractions [0,1], you can multiply it by :attr:`max_ray_length` to get the depth in meters.\n ' (from_positions, to_positions) = self.get_ray_from_to_positions() rays = self.sim.ray_test_batch(from_positions=from_positions, to_positions=to_positions) hit = np.array([ray[2] for ray in rays]).reshape(self._num_rays_width, self._num_rays_height) if apply_noise: hit = self._noise(hit) return hit<|docstring|>Return the heightmap. Returns: np.array[float[width, height]]: Height map with shape [width, height] where the values are the hit fractions [0,1], you can multiply it by :attr:`max_ray_length` to get the depth in meters.<|endoftext|>
d3a93afe3f2784fdcf0b8e7ddab9cf7d051abb73a78a0ab3262d6cb1841899c7
def render(self, enable=True, color=None): 'Render the grid map in the simulator; each point/intersection in the grid is represented as a visual\n sphere. The position of these spheres is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list of 4 float, np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
Render the grid map in the simulator; each point/intersection in the grid is represented as a visual sphere. The position of these spheres is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list of 4 float, np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.
pyrobolearn/robots/sensors/ray.py
render
benjaminalt/pyrobolearn
2
python
def render(self, enable=True, color=None): 'Render the grid map in the simulator; each point/intersection in the grid is represented as a visual\n sphere. The position of these spheres is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list of 4 float, np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass
def render(self, enable=True, color=None): 'Render the grid map in the simulator; each point/intersection in the grid is represented as a visual\n sphere. The position of these spheres is updated at runtime (each time you call this function).\n\n Args:\n enable (bool): if we should render or not.\n color (None, tuple/list of 4 float, np.ndarray[float[4]]): RGBA color of all the rays, where each channel\n is between 0 and 1.\n ' pass<|docstring|>Render the grid map in the simulator; each point/intersection in the grid is represented as a visual sphere. The position of these spheres is updated at runtime (each time you call this function). Args: enable (bool): if we should render or not. color (None, tuple/list of 4 float, np.ndarray[float[4]]): RGBA color of all the rays, where each channel is between 0 and 1.<|endoftext|>