repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
resonai/ybt | yabt/target_utils.py | hashify_files | def hashify_files(files: list) -> dict:
"""Return mapping from file path to file hash."""
return {filepath.replace('\\', '/'): hash_tree(filepath)
for filepath in listify(files)} | python | def hashify_files(files: list) -> dict:
"""Return mapping from file path to file hash."""
return {filepath.replace('\\', '/'): hash_tree(filepath)
for filepath in listify(files)} | [
"def",
"hashify_files",
"(",
"files",
":",
"list",
")",
"->",
"dict",
":",
"return",
"{",
"filepath",
".",
"replace",
"(",
"'\\\\'",
",",
"'/'",
")",
":",
"hash_tree",
"(",
"filepath",
")",
"for",
"filepath",
"in",
"listify",
"(",
"files",
")",
"}"
] | Return mapping from file path to file hash. | [
"Return",
"mapping",
"from",
"file",
"path",
"to",
"file",
"hash",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L135-L138 | train | 44,100 |
resonai/ybt | yabt/target_utils.py | process_prop | def process_prop(prop_type: PT, value, build_context):
"""Return a cachable representation of the prop `value` given its type."""
if prop_type in (PT.Target, PT.TargetList):
return hashify_targets(value, build_context)
elif prop_type in (PT.File, PT.FileList):
return hashify_files(value)
return value | python | def process_prop(prop_type: PT, value, build_context):
"""Return a cachable representation of the prop `value` given its type."""
if prop_type in (PT.Target, PT.TargetList):
return hashify_targets(value, build_context)
elif prop_type in (PT.File, PT.FileList):
return hashify_files(value)
return value | [
"def",
"process_prop",
"(",
"prop_type",
":",
"PT",
",",
"value",
",",
"build_context",
")",
":",
"if",
"prop_type",
"in",
"(",
"PT",
".",
"Target",
",",
"PT",
".",
"TargetList",
")",
":",
"return",
"hashify_targets",
"(",
"value",
",",
"build_context",
... | Return a cachable representation of the prop `value` given its type. | [
"Return",
"a",
"cachable",
"representation",
"of",
"the",
"prop",
"value",
"given",
"its",
"type",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L141-L147 | train | 44,101 |
resonai/ybt | yabt/target_utils.py | Target.compute_json | def compute_json(self, build_context):
"""Compute and store a JSON serialization of this target for caching
purposes.
The serialization includes:
- The build flavor
- The builder name
- Target tags
- Hashes of target dependencies & buildenv
- Processed props (where target props are replaced with their hashes,
and file props are replaced with mapping from file name to its hash)
It specifically does NOT include:
- Artifacts produced by the target
The target name is currently included, although it would be better off
to leave it out, and allow targets to be renamed without affecting
their caching status (if it's just a rename).
It is currently included because it's the easy way to account for the
fact that when cached artifacts are restored, their path may be a
function of the target name in non-essential ways (such as a workspace
dir name).
"""
props = {}
test_props = {}
for prop in self.props:
if prop in self._prop_json_blacklist:
continue
sig_spec = Plugin.builders[self.builder_name].sig.get(prop)
if sig_spec is None:
continue
if prop in self._prop_json_testlist:
test_props[prop] = process_prop(sig_spec.type,
self.props[prop],
build_context)
else:
props[prop] = process_prop(sig_spec.type, self.props[prop],
build_context)
json_dict = dict(
# TODO: avoid including the name in the hashed json...
name=self.name,
builder_name=self.builder_name,
deps=hashify_targets(self.deps, build_context),
props=props,
buildenv=hashify_targets(self.buildenv, build_context),
tags=sorted(list(self.tags)),
flavor=build_context.conf.flavor, # TODO: any other conf args?
# yabt_version=__version__, # TODO: is this needed?
)
json_test_dict = dict(
props=test_props,
)
self._json = json.dumps(json_dict, sort_keys=True, indent=4)
self._test_json = json.dumps(json_test_dict, sort_keys=True, indent=4) | python | def compute_json(self, build_context):
"""Compute and store a JSON serialization of this target for caching
purposes.
The serialization includes:
- The build flavor
- The builder name
- Target tags
- Hashes of target dependencies & buildenv
- Processed props (where target props are replaced with their hashes,
and file props are replaced with mapping from file name to its hash)
It specifically does NOT include:
- Artifacts produced by the target
The target name is currently included, although it would be better off
to leave it out, and allow targets to be renamed without affecting
their caching status (if it's just a rename).
It is currently included because it's the easy way to account for the
fact that when cached artifacts are restored, their path may be a
function of the target name in non-essential ways (such as a workspace
dir name).
"""
props = {}
test_props = {}
for prop in self.props:
if prop in self._prop_json_blacklist:
continue
sig_spec = Plugin.builders[self.builder_name].sig.get(prop)
if sig_spec is None:
continue
if prop in self._prop_json_testlist:
test_props[prop] = process_prop(sig_spec.type,
self.props[prop],
build_context)
else:
props[prop] = process_prop(sig_spec.type, self.props[prop],
build_context)
json_dict = dict(
# TODO: avoid including the name in the hashed json...
name=self.name,
builder_name=self.builder_name,
deps=hashify_targets(self.deps, build_context),
props=props,
buildenv=hashify_targets(self.buildenv, build_context),
tags=sorted(list(self.tags)),
flavor=build_context.conf.flavor, # TODO: any other conf args?
# yabt_version=__version__, # TODO: is this needed?
)
json_test_dict = dict(
props=test_props,
)
self._json = json.dumps(json_dict, sort_keys=True, indent=4)
self._test_json = json.dumps(json_test_dict, sort_keys=True, indent=4) | [
"def",
"compute_json",
"(",
"self",
",",
"build_context",
")",
":",
"props",
"=",
"{",
"}",
"test_props",
"=",
"{",
"}",
"for",
"prop",
"in",
"self",
".",
"props",
":",
"if",
"prop",
"in",
"self",
".",
"_prop_json_blacklist",
":",
"continue",
"sig_spec",... | Compute and store a JSON serialization of this target for caching
purposes.
The serialization includes:
- The build flavor
- The builder name
- Target tags
- Hashes of target dependencies & buildenv
- Processed props (where target props are replaced with their hashes,
and file props are replaced with mapping from file name to its hash)
It specifically does NOT include:
- Artifacts produced by the target
The target name is currently included, although it would be better off
to leave it out, and allow targets to be renamed without affecting
their caching status (if it's just a rename).
It is currently included because it's the easy way to account for the
fact that when cached artifacts are restored, their path may be a
function of the target name in non-essential ways (such as a workspace
dir name). | [
"Compute",
"and",
"store",
"a",
"JSON",
"serialization",
"of",
"this",
"target",
"for",
"caching",
"purposes",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L191-L245 | train | 44,102 |
resonai/ybt | yabt/target_utils.py | Target.json | def json(self, build_context) -> str:
"""Return JSON serialization of this target for caching purposes."""
if self._json is None:
self.compute_json(build_context)
return self._json | python | def json(self, build_context) -> str:
"""Return JSON serialization of this target for caching purposes."""
if self._json is None:
self.compute_json(build_context)
return self._json | [
"def",
"json",
"(",
"self",
",",
"build_context",
")",
"->",
"str",
":",
"if",
"self",
".",
"_json",
"is",
"None",
":",
"self",
".",
"compute_json",
"(",
"build_context",
")",
"return",
"self",
".",
"_json"
] | Return JSON serialization of this target for caching purposes. | [
"Return",
"JSON",
"serialization",
"of",
"this",
"target",
"for",
"caching",
"purposes",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L247-L251 | train | 44,103 |
resonai/ybt | yabt/target_utils.py | Target.compute_hash | def compute_hash(self, build_context):
"""Compute and store the hash of this target for caching purposes.
The hash is computed over the target JSON representation.
"""
m = md5()
m.update(self.json(build_context).encode('utf8'))
self._hash = m.hexdigest()
m = md5()
m.update(self.test_json(build_context).encode('utf8'))
self._test_hash = m.hexdigest() | python | def compute_hash(self, build_context):
"""Compute and store the hash of this target for caching purposes.
The hash is computed over the target JSON representation.
"""
m = md5()
m.update(self.json(build_context).encode('utf8'))
self._hash = m.hexdigest()
m = md5()
m.update(self.test_json(build_context).encode('utf8'))
self._test_hash = m.hexdigest() | [
"def",
"compute_hash",
"(",
"self",
",",
"build_context",
")",
":",
"m",
"=",
"md5",
"(",
")",
"m",
".",
"update",
"(",
"self",
".",
"json",
"(",
"build_context",
")",
".",
"encode",
"(",
"'utf8'",
")",
")",
"self",
".",
"_hash",
"=",
"m",
".",
"... | Compute and store the hash of this target for caching purposes.
The hash is computed over the target JSON representation. | [
"Compute",
"and",
"store",
"the",
"hash",
"of",
"this",
"target",
"for",
"caching",
"purposes",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L260-L270 | train | 44,104 |
resonai/ybt | yabt/target_utils.py | Target.hash | def hash(self, build_context) -> str:
"""Return the hash of this target for caching purposes."""
if self._hash is None:
self.compute_hash(build_context)
return self._hash | python | def hash(self, build_context) -> str:
"""Return the hash of this target for caching purposes."""
if self._hash is None:
self.compute_hash(build_context)
return self._hash | [
"def",
"hash",
"(",
"self",
",",
"build_context",
")",
"->",
"str",
":",
"if",
"self",
".",
"_hash",
"is",
"None",
":",
"self",
".",
"compute_hash",
"(",
"build_context",
")",
"return",
"self",
".",
"_hash"
] | Return the hash of this target for caching purposes. | [
"Return",
"the",
"hash",
"of",
"this",
"target",
"for",
"caching",
"purposes",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_utils.py#L272-L276 | train | 44,105 |
resonai/ybt | yabt/docker.py | handle_build_cache | def handle_build_cache(
conf: Config, name: str, tag: str, icb: ImageCachingBehavior):
"""Handle Docker image build cache.
Return image ID if image is cached, and there's no need to redo the build.
Return None if need to build the image (whether cached locally or not).
Raise RuntimeError if not allowed to build the image because of state of
local cache.
TODO(itamar): figure out a better name for this function, that reflects
what it returns (e.g. `get_cached_image_id`),
without "surprising" the caller with the potential of long
and non-trivial operations that are not usually expected from functions
with such names.
"""
if icb.pull_if_cached or (icb.pull_if_not_cached and
get_cached_image_id(icb.remote_image) is None):
try:
pull_docker_image(icb.remote_image, conf.docker_pull_cmd)
except CalledProcessError:
pass
local_image = '{}:{}'.format(name, tag)
if (icb.skip_build_if_cached and
get_cached_image_id(icb.remote_image) is not None):
tag_docker_image(icb.remote_image, local_image)
return get_cached_image_id(local_image)
if ((not icb.allow_build_if_not_cached) and
get_cached_image_id(icb.remote_image) is None):
raise RuntimeError('No cached image for {}'.format(local_image))
return None | python | def handle_build_cache(
conf: Config, name: str, tag: str, icb: ImageCachingBehavior):
"""Handle Docker image build cache.
Return image ID if image is cached, and there's no need to redo the build.
Return None if need to build the image (whether cached locally or not).
Raise RuntimeError if not allowed to build the image because of state of
local cache.
TODO(itamar): figure out a better name for this function, that reflects
what it returns (e.g. `get_cached_image_id`),
without "surprising" the caller with the potential of long
and non-trivial operations that are not usually expected from functions
with such names.
"""
if icb.pull_if_cached or (icb.pull_if_not_cached and
get_cached_image_id(icb.remote_image) is None):
try:
pull_docker_image(icb.remote_image, conf.docker_pull_cmd)
except CalledProcessError:
pass
local_image = '{}:{}'.format(name, tag)
if (icb.skip_build_if_cached and
get_cached_image_id(icb.remote_image) is not None):
tag_docker_image(icb.remote_image, local_image)
return get_cached_image_id(local_image)
if ((not icb.allow_build_if_not_cached) and
get_cached_image_id(icb.remote_image) is None):
raise RuntimeError('No cached image for {}'.format(local_image))
return None | [
"def",
"handle_build_cache",
"(",
"conf",
":",
"Config",
",",
"name",
":",
"str",
",",
"tag",
":",
"str",
",",
"icb",
":",
"ImageCachingBehavior",
")",
":",
"if",
"icb",
".",
"pull_if_cached",
"or",
"(",
"icb",
".",
"pull_if_not_cached",
"and",
"get_cached... | Handle Docker image build cache.
Return image ID if image is cached, and there's no need to redo the build.
Return None if need to build the image (whether cached locally or not).
Raise RuntimeError if not allowed to build the image because of state of
local cache.
TODO(itamar): figure out a better name for this function, that reflects
what it returns (e.g. `get_cached_image_id`),
without "surprising" the caller with the potential of long
and non-trivial operations that are not usually expected from functions
with such names. | [
"Handle",
"Docker",
"image",
"build",
"cache",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/docker.py#L152-L181 | train | 44,106 |
resonai/ybt | yabt/logging.py | add_stream_handler | def add_stream_handler(logger, stream):
"""Add a brace-handler stream-handler using `stream` to `logger`."""
handler = logging.StreamHandler(stream=stream)
# Using Brace Formatter (see
# https://docs.python.org/3.5/howto/logging-cookbook.html#use-of-alternative-formatting-styles)
formatter = logging.Formatter(
'{asctime} {name:24s} {levelname:8s} {message}', style='{')
handler.setFormatter(formatter)
logger.addHandler(handler) | python | def add_stream_handler(logger, stream):
"""Add a brace-handler stream-handler using `stream` to `logger`."""
handler = logging.StreamHandler(stream=stream)
# Using Brace Formatter (see
# https://docs.python.org/3.5/howto/logging-cookbook.html#use-of-alternative-formatting-styles)
formatter = logging.Formatter(
'{asctime} {name:24s} {levelname:8s} {message}', style='{')
handler.setFormatter(formatter)
logger.addHandler(handler) | [
"def",
"add_stream_handler",
"(",
"logger",
",",
"stream",
")",
":",
"handler",
"=",
"logging",
".",
"StreamHandler",
"(",
"stream",
"=",
"stream",
")",
"# Using Brace Formatter (see",
"# https://docs.python.org/3.5/howto/logging-cookbook.html#use-of-alternative-formatting-styl... | Add a brace-handler stream-handler using `stream` to `logger`. | [
"Add",
"a",
"brace",
"-",
"handler",
"stream",
"-",
"handler",
"using",
"stream",
"to",
"logger",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/logging.py#L60-L68 | train | 44,107 |
resonai/ybt | yabt/logging.py | configure_logging | def configure_logging(conf):
"""Initialize and configure logging."""
root_logger = logging.getLogger()
root_logger.setLevel(getattr(logging, conf.loglevel.upper()))
if conf.logtostderr:
add_stream_handler(root_logger, sys.stderr)
if conf.logtostdout:
add_stream_handler(root_logger, sys.stdout) | python | def configure_logging(conf):
"""Initialize and configure logging."""
root_logger = logging.getLogger()
root_logger.setLevel(getattr(logging, conf.loglevel.upper()))
if conf.logtostderr:
add_stream_handler(root_logger, sys.stderr)
if conf.logtostdout:
add_stream_handler(root_logger, sys.stdout) | [
"def",
"configure_logging",
"(",
"conf",
")",
":",
"root_logger",
"=",
"logging",
".",
"getLogger",
"(",
")",
"root_logger",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"conf",
".",
"loglevel",
".",
"upper",
"(",
")",
")",
")",
"if",
"conf",
... | Initialize and configure logging. | [
"Initialize",
"and",
"configure",
"logging",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/logging.py#L71-L78 | train | 44,108 |
resonai/ybt | yabt/extend.py | Builder.register_sig | def register_sig(self, builder_name: str, sig: list, docstring: str,
cachable: bool=True, attempts=1):
"""Register a builder signature & docstring for `builder_name`.
The input for the builder signature is a list of "sig-spec"s
representing the builder function arguments.
Each sig-spec in the list can be:
1. A string. This represents a simple untyped positional argument name,
with no default value.
2. A 1-tuple with one string element. Same as #1.
3. A 2-tuple with ('arg-name', arg_type). This represents a typed
positional argument, if arg_type is an instance of PropType enum.
4. A 2-tuple with ('arg-name', default_value). This represents an
un-typed keyword argument with a default value.
5. A 3-tuple with ('arg-name', arg_type, default_value). This
represents a typed keyword argument with a default value,
if arg_type is an instance of PropType enum.
In addition to the args specified in the `sig` list, there are several
*injected* args:
1. A positional arg `name` of type TargetName is always the first arg.
2. A keyword arg `deps` of type TargetList and default value `None`
(or empty list) is always the first after all builder args.
3. A keyword arg `cachable` of type bool and default value taken from
the signature registration call (`cachable` arg).
4. A keyword arg `license` of type StrList and default value [].
5. A keyword arg `policies` of type StrList and default value [].
6. A keyword arg `packaging_params` of type dict and default value {}
(empty dict).
7. A keyword arg `runtime_params` of type dict and default value {}
(empty dict).
8. A keyword arg `build_params` of type dict and default value {}
(empty dict).
9. A keyword arg `attempts` of type int and default value 1.
"""
if self.sig is not None:
raise KeyError('{} already registered a signature!'
.format(builder_name))
self.sig = OrderedDict(name=ArgSpec(PropType.TargetName, Empty))
self.docstring = docstring
kwargs_section = False
for arg_spec in listify(sig):
arg_name, sig_spec = evaluate_arg_spec(arg_spec)
if arg_name in self.sig or arg_name in INJECTED_ARGS:
raise SyntaxError(
"duplicate argument '{}' in function definition"
.format(arg_name))
self.sig[arg_name] = sig_spec
if sig_spec.default == Empty:
if kwargs_section:
# TODO(itamar): how to give syntax error source annotation?
# (see: http://stackoverflow.com/questions/33717804)
raise SyntaxError(
'non-default argument follows default argument')
self.min_positional_args += 1
else:
kwargs_section = True
self.sig['deps'] = ArgSpec(PropType.TargetList, None)
self.sig['cachable'] = ArgSpec(PropType.bool, cachable)
self.sig['license'] = ArgSpec(PropType.StrList, None)
self.sig['policies'] = ArgSpec(PropType.StrList, None)
self.sig['packaging_params'] = ArgSpec(PropType.dict, None)
self.sig['runtime_params'] = ArgSpec(PropType.dict, None)
self.sig['build_params'] = ArgSpec(PropType.dict, None)
self.sig['attempts'] = ArgSpec(PropType.numeric, 1) | python | def register_sig(self, builder_name: str, sig: list, docstring: str,
cachable: bool=True, attempts=1):
"""Register a builder signature & docstring for `builder_name`.
The input for the builder signature is a list of "sig-spec"s
representing the builder function arguments.
Each sig-spec in the list can be:
1. A string. This represents a simple untyped positional argument name,
with no default value.
2. A 1-tuple with one string element. Same as #1.
3. A 2-tuple with ('arg-name', arg_type). This represents a typed
positional argument, if arg_type is an instance of PropType enum.
4. A 2-tuple with ('arg-name', default_value). This represents an
un-typed keyword argument with a default value.
5. A 3-tuple with ('arg-name', arg_type, default_value). This
represents a typed keyword argument with a default value,
if arg_type is an instance of PropType enum.
In addition to the args specified in the `sig` list, there are several
*injected* args:
1. A positional arg `name` of type TargetName is always the first arg.
2. A keyword arg `deps` of type TargetList and default value `None`
(or empty list) is always the first after all builder args.
3. A keyword arg `cachable` of type bool and default value taken from
the signature registration call (`cachable` arg).
4. A keyword arg `license` of type StrList and default value [].
5. A keyword arg `policies` of type StrList and default value [].
6. A keyword arg `packaging_params` of type dict and default value {}
(empty dict).
7. A keyword arg `runtime_params` of type dict and default value {}
(empty dict).
8. A keyword arg `build_params` of type dict and default value {}
(empty dict).
9. A keyword arg `attempts` of type int and default value 1.
"""
if self.sig is not None:
raise KeyError('{} already registered a signature!'
.format(builder_name))
self.sig = OrderedDict(name=ArgSpec(PropType.TargetName, Empty))
self.docstring = docstring
kwargs_section = False
for arg_spec in listify(sig):
arg_name, sig_spec = evaluate_arg_spec(arg_spec)
if arg_name in self.sig or arg_name in INJECTED_ARGS:
raise SyntaxError(
"duplicate argument '{}' in function definition"
.format(arg_name))
self.sig[arg_name] = sig_spec
if sig_spec.default == Empty:
if kwargs_section:
# TODO(itamar): how to give syntax error source annotation?
# (see: http://stackoverflow.com/questions/33717804)
raise SyntaxError(
'non-default argument follows default argument')
self.min_positional_args += 1
else:
kwargs_section = True
self.sig['deps'] = ArgSpec(PropType.TargetList, None)
self.sig['cachable'] = ArgSpec(PropType.bool, cachable)
self.sig['license'] = ArgSpec(PropType.StrList, None)
self.sig['policies'] = ArgSpec(PropType.StrList, None)
self.sig['packaging_params'] = ArgSpec(PropType.dict, None)
self.sig['runtime_params'] = ArgSpec(PropType.dict, None)
self.sig['build_params'] = ArgSpec(PropType.dict, None)
self.sig['attempts'] = ArgSpec(PropType.numeric, 1) | [
"def",
"register_sig",
"(",
"self",
",",
"builder_name",
":",
"str",
",",
"sig",
":",
"list",
",",
"docstring",
":",
"str",
",",
"cachable",
":",
"bool",
"=",
"True",
",",
"attempts",
"=",
"1",
")",
":",
"if",
"self",
".",
"sig",
"is",
"not",
"None... | Register a builder signature & docstring for `builder_name`.
The input for the builder signature is a list of "sig-spec"s
representing the builder function arguments.
Each sig-spec in the list can be:
1. A string. This represents a simple untyped positional argument name,
with no default value.
2. A 1-tuple with one string element. Same as #1.
3. A 2-tuple with ('arg-name', arg_type). This represents a typed
positional argument, if arg_type is an instance of PropType enum.
4. A 2-tuple with ('arg-name', default_value). This represents an
un-typed keyword argument with a default value.
5. A 3-tuple with ('arg-name', arg_type, default_value). This
represents a typed keyword argument with a default value,
if arg_type is an instance of PropType enum.
In addition to the args specified in the `sig` list, there are several
*injected* args:
1. A positional arg `name` of type TargetName is always the first arg.
2. A keyword arg `deps` of type TargetList and default value `None`
(or empty list) is always the first after all builder args.
3. A keyword arg `cachable` of type bool and default value taken from
the signature registration call (`cachable` arg).
4. A keyword arg `license` of type StrList and default value [].
5. A keyword arg `policies` of type StrList and default value [].
6. A keyword arg `packaging_params` of type dict and default value {}
(empty dict).
7. A keyword arg `runtime_params` of type dict and default value {}
(empty dict).
8. A keyword arg `build_params` of type dict and default value {}
(empty dict).
9. A keyword arg `attempts` of type int and default value 1. | [
"Register",
"a",
"builder",
"signature",
"&",
"docstring",
"for",
"builder_name",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/extend.py#L97-L162 | train | 44,109 |
resonai/ybt | yabt/extend.py | Plugin.remove_builder | def remove_builder(cls, builder_name: str):
"""Remove a registered builder `builder_name`.
No reason to use this except for tests.
"""
cls.builders.pop(builder_name, None)
for hook_spec in cls.hooks.values():
hook_spec.pop(builder_name, None) | python | def remove_builder(cls, builder_name: str):
"""Remove a registered builder `builder_name`.
No reason to use this except for tests.
"""
cls.builders.pop(builder_name, None)
for hook_spec in cls.hooks.values():
hook_spec.pop(builder_name, None) | [
"def",
"remove_builder",
"(",
"cls",
",",
"builder_name",
":",
"str",
")",
":",
"cls",
".",
"builders",
".",
"pop",
"(",
"builder_name",
",",
"None",
")",
"for",
"hook_spec",
"in",
"cls",
".",
"hooks",
".",
"values",
"(",
")",
":",
"hook_spec",
".",
... | Remove a registered builder `builder_name`.
No reason to use this except for tests. | [
"Remove",
"a",
"registered",
"builder",
"builder_name",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/extend.py#L198-L205 | train | 44,110 |
resonai/ybt | yabt/buildfile_utils.py | to_build_module | def to_build_module(build_file_path: str, conf: Config) -> str:
"""Return a normalized build module name for `build_file_path`."""
build_file = Path(build_file_path)
root = Path(conf.project_root)
return build_file.resolve().relative_to(root).parent.as_posix().strip('.') | python | def to_build_module(build_file_path: str, conf: Config) -> str:
"""Return a normalized build module name for `build_file_path`."""
build_file = Path(build_file_path)
root = Path(conf.project_root)
return build_file.resolve().relative_to(root).parent.as_posix().strip('.') | [
"def",
"to_build_module",
"(",
"build_file_path",
":",
"str",
",",
"conf",
":",
"Config",
")",
"->",
"str",
":",
"build_file",
"=",
"Path",
"(",
"build_file_path",
")",
"root",
"=",
"Path",
"(",
"conf",
".",
"project_root",
")",
"return",
"build_file",
"."... | Return a normalized build module name for `build_file_path`. | [
"Return",
"a",
"normalized",
"build",
"module",
"name",
"for",
"build_file_path",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildfile_utils.py#L29-L33 | train | 44,111 |
resonai/ybt | yabt/yabt.py | cmd_version | def cmd_version(unused_conf):
"""Print out version information about YABT and detected builders."""
import pkg_resources
print('This is {} version {}, imported from {}'
.format(__oneliner__, __version__, __file__))
if len(Plugin.builders) > 0:
print('setuptools registered builders:')
for entry_point in pkg_resources.iter_entry_points('yabt.builders'):
print(' {0.module_name}.{0.name} (dist {0.dist})'.format(entry_point)) | python | def cmd_version(unused_conf):
"""Print out version information about YABT and detected builders."""
import pkg_resources
print('This is {} version {}, imported from {}'
.format(__oneliner__, __version__, __file__))
if len(Plugin.builders) > 0:
print('setuptools registered builders:')
for entry_point in pkg_resources.iter_entry_points('yabt.builders'):
print(' {0.module_name}.{0.name} (dist {0.dist})'.format(entry_point)) | [
"def",
"cmd_version",
"(",
"unused_conf",
")",
":",
"import",
"pkg_resources",
"print",
"(",
"'This is {} version {}, imported from {}'",
".",
"format",
"(",
"__oneliner__",
",",
"__version__",
",",
"__file__",
")",
")",
"if",
"len",
"(",
"Plugin",
".",
"builders"... | Print out version information about YABT and detected builders. | [
"Print",
"out",
"version",
"information",
"about",
"YABT",
"and",
"detected",
"builders",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/yabt.py#L44-L52 | train | 44,112 |
resonai/ybt | yabt/yabt.py | cmd_list | def cmd_list(unused_conf: Config):
"""Print out information on loaded builders and hooks."""
for name, builder in sorted(Plugin.builders.items()):
if builder.func:
print('+- {0:16s} implemented in {1.__module__}.{1.__name__}()'
.format(name, builder.func))
else:
print('+- {0:16s} loaded with no builder function'.format(name))
for hook_name, hook_func in sorted(Plugin.get_hooks_for_builder(name)):
print(' +- {0} hook implemented in '
'{1.__module__}.{1.__name__}()'
.format(hook_name, hook_func)) | python | def cmd_list(unused_conf: Config):
"""Print out information on loaded builders and hooks."""
for name, builder in sorted(Plugin.builders.items()):
if builder.func:
print('+- {0:16s} implemented in {1.__module__}.{1.__name__}()'
.format(name, builder.func))
else:
print('+- {0:16s} loaded with no builder function'.format(name))
for hook_name, hook_func in sorted(Plugin.get_hooks_for_builder(name)):
print(' +- {0} hook implemented in '
'{1.__module__}.{1.__name__}()'
.format(hook_name, hook_func)) | [
"def",
"cmd_list",
"(",
"unused_conf",
":",
"Config",
")",
":",
"for",
"name",
",",
"builder",
"in",
"sorted",
"(",
"Plugin",
".",
"builders",
".",
"items",
"(",
")",
")",
":",
"if",
"builder",
".",
"func",
":",
"print",
"(",
"'+- {0:16s} implemented in ... | Print out information on loaded builders and hooks. | [
"Print",
"out",
"information",
"on",
"loaded",
"builders",
"and",
"hooks",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/yabt.py#L60-L71 | train | 44,113 |
resonai/ybt | yabt/yabt.py | cmd_build | def cmd_build(conf: Config, run_tests: bool=False):
"""Build requested targets, and their dependencies."""
build_context = BuildContext(conf)
populate_targets_graph(build_context, conf)
build_context.build_graph(run_tests=run_tests)
build_context.write_artifacts_metadata() | python | def cmd_build(conf: Config, run_tests: bool=False):
"""Build requested targets, and their dependencies."""
build_context = BuildContext(conf)
populate_targets_graph(build_context, conf)
build_context.build_graph(run_tests=run_tests)
build_context.write_artifacts_metadata() | [
"def",
"cmd_build",
"(",
"conf",
":",
"Config",
",",
"run_tests",
":",
"bool",
"=",
"False",
")",
":",
"build_context",
"=",
"BuildContext",
"(",
"conf",
")",
"populate_targets_graph",
"(",
"build_context",
",",
"conf",
")",
"build_context",
".",
"build_graph"... | Build requested targets, and their dependencies. | [
"Build",
"requested",
"targets",
"and",
"their",
"dependencies",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/yabt.py#L74-L79 | train | 44,114 |
resonai/ybt | yabt/yabt.py | main | def main():
"""Main `ybt` console script entry point - run YABT from command-line."""
conf = init_and_get_conf()
logger = make_logger(__name__)
logger.info('YaBT version {}', __version__)
handlers = {
'build': YabtCommand(func=cmd_build, requires_project=True),
'dot': YabtCommand(func=cmd_dot, requires_project=True),
'test': YabtCommand(func=cmd_test, requires_project=True),
'tree': YabtCommand(func=cmd_tree, requires_project=True),
'version': YabtCommand(func=cmd_version, requires_project=False),
'list-builders': YabtCommand(func=cmd_list, requires_project=False),
}
command = handlers[conf.cmd]
if command.requires_project and not conf.in_yabt_project():
fatal('Not a YABT project (or any of the parent directories): {}',
BUILD_PROJ_FILE)
try:
command.func(conf)
except Exception as ex:
fatal('{}', ex) | python | def main():
"""Main `ybt` console script entry point - run YABT from command-line."""
conf = init_and_get_conf()
logger = make_logger(__name__)
logger.info('YaBT version {}', __version__)
handlers = {
'build': YabtCommand(func=cmd_build, requires_project=True),
'dot': YabtCommand(func=cmd_dot, requires_project=True),
'test': YabtCommand(func=cmd_test, requires_project=True),
'tree': YabtCommand(func=cmd_tree, requires_project=True),
'version': YabtCommand(func=cmd_version, requires_project=False),
'list-builders': YabtCommand(func=cmd_list, requires_project=False),
}
command = handlers[conf.cmd]
if command.requires_project and not conf.in_yabt_project():
fatal('Not a YABT project (or any of the parent directories): {}',
BUILD_PROJ_FILE)
try:
command.func(conf)
except Exception as ex:
fatal('{}', ex) | [
"def",
"main",
"(",
")",
":",
"conf",
"=",
"init_and_get_conf",
"(",
")",
"logger",
"=",
"make_logger",
"(",
"__name__",
")",
"logger",
".",
"info",
"(",
"'YaBT version {}'",
",",
"__version__",
")",
"handlers",
"=",
"{",
"'build'",
":",
"YabtCommand",
"("... | Main `ybt` console script entry point - run YABT from command-line. | [
"Main",
"ybt",
"console",
"script",
"entry",
"point",
"-",
"run",
"YABT",
"from",
"command",
"-",
"line",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/yabt.py#L129-L149 | train | 44,115 |
resonai/ybt | yabt/builders/cpp.py | cpp_app_builder | def cpp_app_builder(build_context, target):
"""Pack a C++ binary as a Docker image with its runtime dependencies.
TODO(itamar): Dynamically analyze the binary and copy shared objects
from its buildenv image to the runtime image, unless they're installed.
"""
yprint(build_context.conf, 'Build CppApp', target)
if target.props.executable and target.props.main:
raise KeyError(
'`main` and `executable` arguments are mutually exclusive')
if target.props.executable:
if target.props.executable not in target.artifacts.get(AT.app):
target.artifacts.add(AT.app, target.props.executable)
entrypoint = [target.props.executable]
elif target.props.main:
prog = build_context.targets[target.props.main]
binary = list(prog.artifacts.get(AT.binary).keys())[0]
entrypoint = ['/usr/src/bin/' + binary]
else:
raise KeyError('Must specify either `main` or `executable` argument')
build_app_docker_and_bin(
build_context, target, entrypoint=entrypoint) | python | def cpp_app_builder(build_context, target):
"""Pack a C++ binary as a Docker image with its runtime dependencies.
TODO(itamar): Dynamically analyze the binary and copy shared objects
from its buildenv image to the runtime image, unless they're installed.
"""
yprint(build_context.conf, 'Build CppApp', target)
if target.props.executable and target.props.main:
raise KeyError(
'`main` and `executable` arguments are mutually exclusive')
if target.props.executable:
if target.props.executable not in target.artifacts.get(AT.app):
target.artifacts.add(AT.app, target.props.executable)
entrypoint = [target.props.executable]
elif target.props.main:
prog = build_context.targets[target.props.main]
binary = list(prog.artifacts.get(AT.binary).keys())[0]
entrypoint = ['/usr/src/bin/' + binary]
else:
raise KeyError('Must specify either `main` or `executable` argument')
build_app_docker_and_bin(
build_context, target, entrypoint=entrypoint) | [
"def",
"cpp_app_builder",
"(",
"build_context",
",",
"target",
")",
":",
"yprint",
"(",
"build_context",
".",
"conf",
",",
"'Build CppApp'",
",",
"target",
")",
"if",
"target",
".",
"props",
".",
"executable",
"and",
"target",
".",
"props",
".",
"main",
":... | Pack a C++ binary as a Docker image with its runtime dependencies.
TODO(itamar): Dynamically analyze the binary and copy shared objects
from its buildenv image to the runtime image, unless they're installed. | [
"Pack",
"a",
"C",
"++",
"binary",
"as",
"a",
"Docker",
"image",
"with",
"its",
"runtime",
"dependencies",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L163-L184 | train | 44,116 |
resonai/ybt | yabt/builders/cpp.py | make_pre_build_hook | def make_pre_build_hook(extra_compiler_config_params):
"""Return a pre-build hook function for C++ builders.
When called, during graph build, it computes and stores the compiler-config
object on the target, as well as adding it to the internal_dict prop for
hashing purposes.
"""
def pre_build_hook(build_context, target):
target.compiler_config = CompilerConfig(
build_context, target, extra_compiler_config_params)
target.props._internal_dict_['compiler_config'] = (
target.compiler_config.as_dict())
return pre_build_hook | python | def make_pre_build_hook(extra_compiler_config_params):
"""Return a pre-build hook function for C++ builders.
When called, during graph build, it computes and stores the compiler-config
object on the target, as well as adding it to the internal_dict prop for
hashing purposes.
"""
def pre_build_hook(build_context, target):
target.compiler_config = CompilerConfig(
build_context, target, extra_compiler_config_params)
target.props._internal_dict_['compiler_config'] = (
target.compiler_config.as_dict())
return pre_build_hook | [
"def",
"make_pre_build_hook",
"(",
"extra_compiler_config_params",
")",
":",
"def",
"pre_build_hook",
"(",
"build_context",
",",
"target",
")",
":",
"target",
".",
"compiler_config",
"=",
"CompilerConfig",
"(",
"build_context",
",",
"target",
",",
"extra_compiler_conf... | Return a pre-build hook function for C++ builders.
When called, during graph build, it computes and stores the compiler-config
object on the target, as well as adding it to the internal_dict prop for
hashing purposes. | [
"Return",
"a",
"pre",
"-",
"build",
"hook",
"function",
"for",
"C",
"++",
"builders",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L200-L214 | train | 44,117 |
resonai/ybt | yabt/builders/cpp.py | compile_cc | def compile_cc(build_context, compiler_config, buildenv, sources,
workspace_dir, buildenv_workspace, cmd_env):
"""Compile list of C++ source files in a buildenv image
and return list of generated object file.
"""
objects = []
for src in sources:
obj_rel_path = '{}.o'.format(splitext(src)[0])
obj_file = join(buildenv_workspace, obj_rel_path)
include_paths = [buildenv_workspace] + compiler_config.include_path
compile_cmd = (
[compiler_config.compiler, '-o', obj_file, '-c'] +
compiler_config.compile_flags +
['-I{}'.format(path) for path in include_paths] +
[join(buildenv_workspace, src)])
# TODO: capture and transform error messages from compiler so file
# paths match host paths for smooth(er) editor / IDE integration
build_context.run_in_buildenv(buildenv, compile_cmd, cmd_env)
objects.append(
join(relpath(workspace_dir, build_context.conf.project_root),
obj_rel_path))
return objects | python | def compile_cc(build_context, compiler_config, buildenv, sources,
workspace_dir, buildenv_workspace, cmd_env):
"""Compile list of C++ source files in a buildenv image
and return list of generated object file.
"""
objects = []
for src in sources:
obj_rel_path = '{}.o'.format(splitext(src)[0])
obj_file = join(buildenv_workspace, obj_rel_path)
include_paths = [buildenv_workspace] + compiler_config.include_path
compile_cmd = (
[compiler_config.compiler, '-o', obj_file, '-c'] +
compiler_config.compile_flags +
['-I{}'.format(path) for path in include_paths] +
[join(buildenv_workspace, src)])
# TODO: capture and transform error messages from compiler so file
# paths match host paths for smooth(er) editor / IDE integration
build_context.run_in_buildenv(buildenv, compile_cmd, cmd_env)
objects.append(
join(relpath(workspace_dir, build_context.conf.project_root),
obj_rel_path))
return objects | [
"def",
"compile_cc",
"(",
"build_context",
",",
"compiler_config",
",",
"buildenv",
",",
"sources",
",",
"workspace_dir",
",",
"buildenv_workspace",
",",
"cmd_env",
")",
":",
"objects",
"=",
"[",
"]",
"for",
"src",
"in",
"sources",
":",
"obj_rel_path",
"=",
... | Compile list of C++ source files in a buildenv image
and return list of generated object file. | [
"Compile",
"list",
"of",
"C",
"++",
"source",
"files",
"in",
"a",
"buildenv",
"image",
"and",
"return",
"list",
"of",
"generated",
"object",
"file",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L244-L265 | train | 44,118 |
resonai/ybt | yabt/builders/cpp.py | get_source_files | def get_source_files(target, build_context) -> list:
"""Return list of source files for `target`."""
all_sources = list(target.props.sources)
for proto_dep_name in target.props.protos:
proto_dep = build_context.targets[proto_dep_name]
all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys())
return all_sources | python | def get_source_files(target, build_context) -> list:
"""Return list of source files for `target`."""
all_sources = list(target.props.sources)
for proto_dep_name in target.props.protos:
proto_dep = build_context.targets[proto_dep_name]
all_sources.extend(proto_dep.artifacts.get(AT.gen_cc).keys())
return all_sources | [
"def",
"get_source_files",
"(",
"target",
",",
"build_context",
")",
"->",
"list",
":",
"all_sources",
"=",
"list",
"(",
"target",
".",
"props",
".",
"sources",
")",
"for",
"proto_dep_name",
"in",
"target",
".",
"props",
".",
"protos",
":",
"proto_dep",
"=... | Return list of source files for `target`. | [
"Return",
"list",
"of",
"source",
"files",
"for",
"target",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L307-L313 | train | 44,119 |
resonai/ybt | yabt/builders/cpp.py | build_cpp | def build_cpp(build_context, target, compiler_config, workspace_dir):
"""Compile and link a C++ binary for `target`."""
rmtree(workspace_dir)
binary = join(*split(target.name))
objects = link_cpp_artifacts(build_context, target, workspace_dir, True)
buildenv_workspace = build_context.conf.host_to_buildenv_path(
workspace_dir)
objects.extend(compile_cc(
build_context, compiler_config, target.props.in_buildenv,
get_source_files(target, build_context), workspace_dir,
buildenv_workspace, target.props.cmd_env))
bin_file = join(buildenv_workspace, binary)
link_cmd = (
[compiler_config.linker, '-o', bin_file] +
objects + compiler_config.link_flags)
build_context.run_in_buildenv(
target.props.in_buildenv, link_cmd, target.props.cmd_env)
target.artifacts.add(AT.binary, relpath(join(workspace_dir, binary),
build_context.conf.project_root), binary) | python | def build_cpp(build_context, target, compiler_config, workspace_dir):
"""Compile and link a C++ binary for `target`."""
rmtree(workspace_dir)
binary = join(*split(target.name))
objects = link_cpp_artifacts(build_context, target, workspace_dir, True)
buildenv_workspace = build_context.conf.host_to_buildenv_path(
workspace_dir)
objects.extend(compile_cc(
build_context, compiler_config, target.props.in_buildenv,
get_source_files(target, build_context), workspace_dir,
buildenv_workspace, target.props.cmd_env))
bin_file = join(buildenv_workspace, binary)
link_cmd = (
[compiler_config.linker, '-o', bin_file] +
objects + compiler_config.link_flags)
build_context.run_in_buildenv(
target.props.in_buildenv, link_cmd, target.props.cmd_env)
target.artifacts.add(AT.binary, relpath(join(workspace_dir, binary),
build_context.conf.project_root), binary) | [
"def",
"build_cpp",
"(",
"build_context",
",",
"target",
",",
"compiler_config",
",",
"workspace_dir",
")",
":",
"rmtree",
"(",
"workspace_dir",
")",
"binary",
"=",
"join",
"(",
"*",
"split",
"(",
"target",
".",
"name",
")",
")",
"objects",
"=",
"link_cpp_... | Compile and link a C++ binary for `target`. | [
"Compile",
"and",
"link",
"a",
"C",
"++",
"binary",
"for",
"target",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L316-L334 | train | 44,120 |
resonai/ybt | yabt/builders/cpp.py | cpp_prog_builder | def cpp_prog_builder(build_context, target):
"""Build a C++ binary executable"""
yprint(build_context.conf, 'Build CppProg', target)
workspace_dir = build_context.get_workspace('CppProg', target.name)
build_cpp(build_context, target, target.compiler_config, workspace_dir) | python | def cpp_prog_builder(build_context, target):
"""Build a C++ binary executable"""
yprint(build_context.conf, 'Build CppProg', target)
workspace_dir = build_context.get_workspace('CppProg', target.name)
build_cpp(build_context, target, target.compiler_config, workspace_dir) | [
"def",
"cpp_prog_builder",
"(",
"build_context",
",",
"target",
")",
":",
"yprint",
"(",
"build_context",
".",
"conf",
",",
"'Build CppProg'",
",",
"target",
")",
"workspace_dir",
"=",
"build_context",
".",
"get_workspace",
"(",
"'CppProg'",
",",
"target",
".",
... | Build a C++ binary executable | [
"Build",
"a",
"C",
"++",
"binary",
"executable"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L338-L342 | train | 44,121 |
resonai/ybt | yabt/builders/cpp.py | cpp_lib_builder | def cpp_lib_builder(build_context, target):
"""Build C++ object files"""
yprint(build_context.conf, 'Build CppLib', target)
workspace_dir = build_context.get_workspace('CppLib', target.name)
workspace_src_dir = join(workspace_dir, 'src')
rmtree(workspace_src_dir)
link_cpp_artifacts(build_context, target, workspace_src_dir, False)
buildenv_workspace = build_context.conf.host_to_buildenv_path(
workspace_src_dir)
objects = compile_cc(
build_context, target.compiler_config, target.props.in_buildenv,
get_source_files(target, build_context), workspace_src_dir,
buildenv_workspace, target.props.cmd_env)
for obj_file in objects:
target.artifacts.add(AT.object, obj_file) | python | def cpp_lib_builder(build_context, target):
"""Build C++ object files"""
yprint(build_context.conf, 'Build CppLib', target)
workspace_dir = build_context.get_workspace('CppLib', target.name)
workspace_src_dir = join(workspace_dir, 'src')
rmtree(workspace_src_dir)
link_cpp_artifacts(build_context, target, workspace_src_dir, False)
buildenv_workspace = build_context.conf.host_to_buildenv_path(
workspace_src_dir)
objects = compile_cc(
build_context, target.compiler_config, target.props.in_buildenv,
get_source_files(target, build_context), workspace_src_dir,
buildenv_workspace, target.props.cmd_env)
for obj_file in objects:
target.artifacts.add(AT.object, obj_file) | [
"def",
"cpp_lib_builder",
"(",
"build_context",
",",
"target",
")",
":",
"yprint",
"(",
"build_context",
".",
"conf",
",",
"'Build CppLib'",
",",
"target",
")",
"workspace_dir",
"=",
"build_context",
".",
"get_workspace",
"(",
"'CppLib'",
",",
"target",
".",
"... | Build C++ object files | [
"Build",
"C",
"++",
"object",
"files"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/cpp.py#L389-L403 | train | 44,122 |
resonai/ybt | yabt/policy.py | standard_licenses_only | def standard_licenses_only(build_context, target) -> str:
"""A policy function for allowing specifying only known licenses.
Return error message (string) if policy for `target` is violated,
otherwise return `None`.
To apply in project, include this function in the ilst returned by the
`get_policies` function implemented in the project `YSettings` file.
See example in tests/errors.
"""
for license_name in target.props.license:
if license_name not in KNOWN_LICENSES:
# TODO: include suggestion for similar known license
return 'Unknown license: {}'.format(license_name)
return None | python | def standard_licenses_only(build_context, target) -> str:
"""A policy function for allowing specifying only known licenses.
Return error message (string) if policy for `target` is violated,
otherwise return `None`.
To apply in project, include this function in the ilst returned by the
`get_policies` function implemented in the project `YSettings` file.
See example in tests/errors.
"""
for license_name in target.props.license:
if license_name not in KNOWN_LICENSES:
# TODO: include suggestion for similar known license
return 'Unknown license: {}'.format(license_name)
return None | [
"def",
"standard_licenses_only",
"(",
"build_context",
",",
"target",
")",
"->",
"str",
":",
"for",
"license_name",
"in",
"target",
".",
"props",
".",
"license",
":",
"if",
"license_name",
"not",
"in",
"KNOWN_LICENSES",
":",
"# TODO: include suggestion for similar k... | A policy function for allowing specifying only known licenses.
Return error message (string) if policy for `target` is violated,
otherwise return `None`.
To apply in project, include this function in the ilst returned by the
`get_policies` function implemented in the project `YSettings` file.
See example in tests/errors. | [
"A",
"policy",
"function",
"for",
"allowing",
"specifying",
"only",
"known",
"licenses",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/policy.py#L131-L146 | train | 44,123 |
resonai/ybt | yabt/policy.py | whitelist_licenses_policy | def whitelist_licenses_policy(policy_name: str, allowed_licenses: set):
"""A policy factory for making license-based whitelist policies.
To apply in project, include the function returned from this factory
in the ilst returned by the `get_policies` function implemented in the
project `YSettings` file.
The factory returns a policy function named
`whitelist_{policy_name}_licenses` that applies to targets with
`policy_name` in their policies list.
The returned policy asserts that all licenses contained in the target
(including through explicit & implicit dependencies) are in the whitelist
defined by `allowed_licenses`.
See example in tests/errors.
"""
def policy_func(build_context, target):
"""whitelist_{policy_name}_licenses policy function.
Return error message (string) if policy for `target` is violated,
otherwise return `None`.
"""
if policy_name in target.props.policies:
licenses = set(target.props.license)
for dep in build_context.generate_all_deps(target):
licenses.update(dep.props.license)
licenses.difference_update(allowed_licenses)
if licenses:
return 'Invalid licenses for {} policy: {}'.format(
policy_name, ', '.join(sorted(licenses)))
return None
policy_func.__name__ = 'whitelist_{}_licenses'.format(policy_name)
return policy_func | python | def whitelist_licenses_policy(policy_name: str, allowed_licenses: set):
"""A policy factory for making license-based whitelist policies.
To apply in project, include the function returned from this factory
in the ilst returned by the `get_policies` function implemented in the
project `YSettings` file.
The factory returns a policy function named
`whitelist_{policy_name}_licenses` that applies to targets with
`policy_name` in their policies list.
The returned policy asserts that all licenses contained in the target
(including through explicit & implicit dependencies) are in the whitelist
defined by `allowed_licenses`.
See example in tests/errors.
"""
def policy_func(build_context, target):
"""whitelist_{policy_name}_licenses policy function.
Return error message (string) if policy for `target` is violated,
otherwise return `None`.
"""
if policy_name in target.props.policies:
licenses = set(target.props.license)
for dep in build_context.generate_all_deps(target):
licenses.update(dep.props.license)
licenses.difference_update(allowed_licenses)
if licenses:
return 'Invalid licenses for {} policy: {}'.format(
policy_name, ', '.join(sorted(licenses)))
return None
policy_func.__name__ = 'whitelist_{}_licenses'.format(policy_name)
return policy_func | [
"def",
"whitelist_licenses_policy",
"(",
"policy_name",
":",
"str",
",",
"allowed_licenses",
":",
"set",
")",
":",
"def",
"policy_func",
"(",
"build_context",
",",
"target",
")",
":",
"\"\"\"whitelist_{policy_name}_licenses policy function.\n\n Return error message (st... | A policy factory for making license-based whitelist policies.
To apply in project, include the function returned from this factory
in the ilst returned by the `get_policies` function implemented in the
project `YSettings` file.
The factory returns a policy function named
`whitelist_{policy_name}_licenses` that applies to targets with
`policy_name` in their policies list.
The returned policy asserts that all licenses contained in the target
(including through explicit & implicit dependencies) are in the whitelist
defined by `allowed_licenses`.
See example in tests/errors. | [
"A",
"policy",
"factory",
"for",
"making",
"license",
"-",
"based",
"whitelist",
"policies",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/policy.py#L149-L183 | train | 44,124 |
resonai/ybt | yabt/cli.py | find_project_config_file | def find_project_config_file(project_root: str) -> str:
"""Return absolute path to project-specific config file, if it exists.
:param project_root: Absolute path to project root directory.
A project config file is a file named `YCONFIG_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
"""
if project_root:
project_config_file = os.path.join(project_root, YCONFIG_FILE)
if os.path.isfile(project_config_file):
return project_config_file | python | def find_project_config_file(project_root: str) -> str:
"""Return absolute path to project-specific config file, if it exists.
:param project_root: Absolute path to project root directory.
A project config file is a file named `YCONFIG_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
"""
if project_root:
project_config_file = os.path.join(project_root, YCONFIG_FILE)
if os.path.isfile(project_config_file):
return project_config_file | [
"def",
"find_project_config_file",
"(",
"project_root",
":",
"str",
")",
"->",
"str",
":",
"if",
"project_root",
":",
"project_config_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"project_root",
",",
"YCONFIG_FILE",
")",
"if",
"os",
".",
"path",
".",
"... | Return absolute path to project-specific config file, if it exists.
:param project_root: Absolute path to project root directory.
A project config file is a file named `YCONFIG_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found. | [
"Return",
"absolute",
"path",
"to",
"project",
"-",
"specific",
"config",
"file",
"if",
"it",
"exists",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/cli.py#L134-L148 | train | 44,125 |
resonai/ybt | yabt/cli.py | get_user_settings_module | def get_user_settings_module(project_root: str):
"""Return project-specific user settings module, if it exists.
:param project_root: Absolute path to project root directory.
A project settings file is a file named `YSETTINGS_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
Raise an exception if a file is found, but not importable.
The YSettings file can define 2 special module-level functions that
interact with the YABT CLI & config system:
1. `extend_cli`, if defined, takes the YABT `parser` object and may extend
it, to add custom command-line flags for the project.
(careful not to collide with YABT flags...)
2. `extend_config`, if defined, takes the YABT `config` object and the
parsed `args` object (returned by the the parser), and may extend the
config - should be used to reflect custom project CLI flags in the
config object.
Beyond that, the settings module is available in YBuild's under
`conf.settings` (except for the 2 special fucntions that are removed).
"""
if project_root:
project_settings_file = os.path.join(project_root, YSETTINGS_FILE)
if os.path.isfile(project_settings_file):
settings_loader = SourceFileLoader(
'settings', project_settings_file)
return settings_loader.load_module() | python | def get_user_settings_module(project_root: str):
"""Return project-specific user settings module, if it exists.
:param project_root: Absolute path to project root directory.
A project settings file is a file named `YSETTINGS_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
Raise an exception if a file is found, but not importable.
The YSettings file can define 2 special module-level functions that
interact with the YABT CLI & config system:
1. `extend_cli`, if defined, takes the YABT `parser` object and may extend
it, to add custom command-line flags for the project.
(careful not to collide with YABT flags...)
2. `extend_config`, if defined, takes the YABT `config` object and the
parsed `args` object (returned by the the parser), and may extend the
config - should be used to reflect custom project CLI flags in the
config object.
Beyond that, the settings module is available in YBuild's under
`conf.settings` (except for the 2 special fucntions that are removed).
"""
if project_root:
project_settings_file = os.path.join(project_root, YSETTINGS_FILE)
if os.path.isfile(project_settings_file):
settings_loader = SourceFileLoader(
'settings', project_settings_file)
return settings_loader.load_module() | [
"def",
"get_user_settings_module",
"(",
"project_root",
":",
"str",
")",
":",
"if",
"project_root",
":",
"project_settings_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"project_root",
",",
"YSETTINGS_FILE",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(... | Return project-specific user settings module, if it exists.
:param project_root: Absolute path to project root directory.
A project settings file is a file named `YSETTINGS_FILE` found at the top
level of the project root dir.
Return `None` if project root dir is not specified,
or if no such file is found.
Raise an exception if a file is found, but not importable.
The YSettings file can define 2 special module-level functions that
interact with the YABT CLI & config system:
1. `extend_cli`, if defined, takes the YABT `parser` object and may extend
it, to add custom command-line flags for the project.
(careful not to collide with YABT flags...)
2. `extend_config`, if defined, takes the YABT `config` object and the
parsed `args` object (returned by the the parser), and may extend the
config - should be used to reflect custom project CLI flags in the
config object.
Beyond that, the settings module is available in YBuild's under
`conf.settings` (except for the 2 special fucntions that are removed). | [
"Return",
"project",
"-",
"specific",
"user",
"settings",
"module",
"if",
"it",
"exists",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/cli.py#L151-L182 | train | 44,126 |
resonai/ybt | yabt/cli.py | call_user_func | def call_user_func(settings_module, func_name, *args, **kwargs):
"""Call a user-supplied settings function and clean it up afterwards.
settings_module may be None, or the function may not exist.
If the function exists, it is called with the specified *args and **kwargs,
and the result is returned.
"""
if settings_module:
if hasattr(settings_module, func_name):
func = getattr(settings_module, func_name)
try:
return func(*args, **kwargs)
finally:
# cleanup user function
delattr(settings_module, func_name) | python | def call_user_func(settings_module, func_name, *args, **kwargs):
"""Call a user-supplied settings function and clean it up afterwards.
settings_module may be None, or the function may not exist.
If the function exists, it is called with the specified *args and **kwargs,
and the result is returned.
"""
if settings_module:
if hasattr(settings_module, func_name):
func = getattr(settings_module, func_name)
try:
return func(*args, **kwargs)
finally:
# cleanup user function
delattr(settings_module, func_name) | [
"def",
"call_user_func",
"(",
"settings_module",
",",
"func_name",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"settings_module",
":",
"if",
"hasattr",
"(",
"settings_module",
",",
"func_name",
")",
":",
"func",
"=",
"getattr",
"(",
"setting... | Call a user-supplied settings function and clean it up afterwards.
settings_module may be None, or the function may not exist.
If the function exists, it is called with the specified *args and **kwargs,
and the result is returned. | [
"Call",
"a",
"user",
"-",
"supplied",
"settings",
"function",
"and",
"clean",
"it",
"up",
"afterwards",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/cli.py#L185-L199 | train | 44,127 |
resonai/ybt | yabt/cli.py | get_build_flavor | def get_build_flavor(settings_module, args):
"""Update the flavor arg based on the settings API"""
known_flavors = listify(call_user_func(settings_module, 'known_flavors'))
if args.flavor:
if args.flavor not in known_flavors:
raise ValueError('Unknown build flavor: {}'.format(args.flavor))
else:
args.flavor = call_user_func(settings_module, 'default_flavor')
if args.flavor and args.flavor not in known_flavors:
raise ValueError(
'Unknown default build flavor: {}'.format(args.flavor)) | python | def get_build_flavor(settings_module, args):
"""Update the flavor arg based on the settings API"""
known_flavors = listify(call_user_func(settings_module, 'known_flavors'))
if args.flavor:
if args.flavor not in known_flavors:
raise ValueError('Unknown build flavor: {}'.format(args.flavor))
else:
args.flavor = call_user_func(settings_module, 'default_flavor')
if args.flavor and args.flavor not in known_flavors:
raise ValueError(
'Unknown default build flavor: {}'.format(args.flavor)) | [
"def",
"get_build_flavor",
"(",
"settings_module",
",",
"args",
")",
":",
"known_flavors",
"=",
"listify",
"(",
"call_user_func",
"(",
"settings_module",
",",
"'known_flavors'",
")",
")",
"if",
"args",
".",
"flavor",
":",
"if",
"args",
".",
"flavor",
"not",
... | Update the flavor arg based on the settings API | [
"Update",
"the",
"flavor",
"arg",
"based",
"on",
"the",
"settings",
"API"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/cli.py#L202-L212 | train | 44,128 |
resonai/ybt | yabt/cli.py | init_and_get_conf | def init_and_get_conf(argv: list=None) -> Config:
"""Initialize a YABT CLI environment and return a Config instance.
:param argv: Manual override of command-line params to parse (for tests).
"""
colorama.init()
work_dir = os.path.abspath(os.curdir)
project_root = search_for_parent_dir(work_dir,
with_files=set([BUILD_PROJ_FILE]))
parser = make_parser(find_project_config_file(project_root))
settings_module = get_user_settings_module(project_root)
call_user_func(settings_module, 'extend_cli', parser)
argcomplete.autocomplete(parser)
args = parser.parse(argv)
get_build_flavor(settings_module, args)
config = Config(args, project_root, work_dir, settings_module)
config.common_conf = call_user_func(
config.settings, 'get_common_config', config, args)
config.flavor_conf = call_user_func(
config.settings, 'get_flavored_config', config, args)
call_user_func(config.settings, 'extend_config', config, args)
if not args.no_policies:
config.policies = listify(call_user_func(
config.settings, 'get_policies', config))
return config | python | def init_and_get_conf(argv: list=None) -> Config:
"""Initialize a YABT CLI environment and return a Config instance.
:param argv: Manual override of command-line params to parse (for tests).
"""
colorama.init()
work_dir = os.path.abspath(os.curdir)
project_root = search_for_parent_dir(work_dir,
with_files=set([BUILD_PROJ_FILE]))
parser = make_parser(find_project_config_file(project_root))
settings_module = get_user_settings_module(project_root)
call_user_func(settings_module, 'extend_cli', parser)
argcomplete.autocomplete(parser)
args = parser.parse(argv)
get_build_flavor(settings_module, args)
config = Config(args, project_root, work_dir, settings_module)
config.common_conf = call_user_func(
config.settings, 'get_common_config', config, args)
config.flavor_conf = call_user_func(
config.settings, 'get_flavored_config', config, args)
call_user_func(config.settings, 'extend_config', config, args)
if not args.no_policies:
config.policies = listify(call_user_func(
config.settings, 'get_policies', config))
return config | [
"def",
"init_and_get_conf",
"(",
"argv",
":",
"list",
"=",
"None",
")",
"->",
"Config",
":",
"colorama",
".",
"init",
"(",
")",
"work_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"curdir",
")",
"project_root",
"=",
"search_for_parent_dir... | Initialize a YABT CLI environment and return a Config instance.
:param argv: Manual override of command-line params to parse (for tests). | [
"Initialize",
"a",
"YABT",
"CLI",
"environment",
"and",
"return",
"a",
"Config",
"instance",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/cli.py#L215-L239 | train | 44,129 |
def stable_reverse_topological_sort(graph):
    """Yield the nodes of `graph` in reverse topological sort order.

    The order is a **unique** (stable) permutation of the nodes: an edge
    from u to v implies that v is yielded before u.  Stability comes from
    visiting roots and successors in sorted-name order.

    Parameters
    ----------
    graph : NetworkX digraph
        A directed graph

    Raises
    ------
    NetworkXError
        If `graph` is undirected (topological sort is only defined for
        directed graphs).
    NetworkXUnfeasible
        If `graph` contains a cycle (no topological sort exists).

    Notes
    -----
    Iterative (non-recursive) DFS, adapted from the networkx 1.11
    implementation of `topological_sort` to yield in reverse order and
    to iterate deterministically.
    """
    if not graph.is_directed():
        raise networkx.NetworkXError(
            'Topological sort not defined on undirected graphs.')
    discovered = set()   # nodes that have entered the DFS stack
    finished = set()     # nodes whose whole subtree was already yielded
    for root in sorted(graph.nodes()):
        if root in finished:
            continue
        stack = [root]   # depth-first search stack
        while stack:
            node = stack[-1]
            if node in finished:
                # Already emitted via another path - drop it.
                stack.pop()
                continue
            discovered.add(node)
            # Collect successors still awaiting completion; a successor
            # that was discovered but not finished means a back-edge.
            pending = []
            for succ in sorted(graph[node]):
                if succ in finished:
                    continue
                if succ in discovered:
                    raise networkx.NetworkXUnfeasible(
                        'Graph contains a cycle.')
                pending.append(succ)
            if pending:
                stack.extend(pending)
            else:
                # All successors done - emit this node.
                finished.add(node)
                yield node
                stack.pop()
"""Return a list of nodes in topological sort order.
This topological sort is a **unique** permutation of the nodes
such that an edge from u to v implies that u appears before v in the
topological sort order.
Parameters
----------
graph : NetworkX digraph
A directed graph
Raises
------
NetworkXError
Topological sort is defined for directed graphs only. If the
graph G is undirected, a NetworkXError is raised.
NetworkXUnfeasible
If G is not a directed acyclic graph (DAG) no topological sort
exists and a NetworkXUnfeasible exception is raised.
Notes
-----
- This algorithm is based on a description and proof in
The Algorithm Design Manual [1]_ .
- This implementation is modified from networkx 1.11 implementation [2]_
to achieve stability, support only reverse (allows yielding instead of
returning a list), and remove the `nbunch` argument (had no use for it).
See also
--------
is_directed_acyclic_graph
References
----------
.. [1] Skiena, S. S. The Algorithm Design Manual (Springer-Verlag, 1998).
http://www.amazon.com/exec/obidos/ASIN/0387948600/ref=ase_thealgorithmrepo/
.. [2] networkx on GitHub
https://github.com/networkx/networkx/blob/8358afac209c00b7feb3e81c901098852a9413b3/networkx/algorithms/dag.py#L88-L168
"""
if not graph.is_directed():
raise networkx.NetworkXError(
'Topological sort not defined on undirected graphs.')
# nonrecursive version
seen = set()
explored = set()
for v in sorted(graph.nodes()):
if v in explored:
continue
fringe = [v] # nodes yet to look at
while fringe:
w = fringe[-1] # depth first search
if w in explored: # already looked down this branch
fringe.pop()
continue
seen.add(w) # mark as seen
# Check successors for cycles and for new nodes
new_nodes = []
for n in sorted(graph[w]):
if n not in explored:
if n in seen: # CYCLE!! OH NOOOO!!
raise networkx.NetworkXUnfeasible(
'Graph contains a cycle.')
new_nodes.append(n)
if new_nodes: # Add new_nodes to fringe
fringe.extend(new_nodes)
else: # No new nodes so w is fully explored
explored.add(w)
yield w
fringe.pop() | [
"def",
"stable_reverse_topological_sort",
"(",
"graph",
")",
":",
"if",
"not",
"graph",
".",
"is_directed",
"(",
")",
":",
"raise",
"networkx",
".",
"NetworkXError",
"(",
"'Topological sort not defined on undirected graphs.'",
")",
"# nonrecursive version",
"seen",
"=",... | Return a list of nodes in topological sort order.
This topological sort is a **unique** permutation of the nodes
such that an edge from u to v implies that u appears before v in the
topological sort order.
Parameters
----------
graph : NetworkX digraph
A directed graph
Raises
------
NetworkXError
Topological sort is defined for directed graphs only. If the
graph G is undirected, a NetworkXError is raised.
NetworkXUnfeasible
If G is not a directed acyclic graph (DAG) no topological sort
exists and a NetworkXUnfeasible exception is raised.
Notes
-----
- This algorithm is based on a description and proof in
The Algorithm Design Manual [1]_ .
- This implementation is modified from networkx 1.11 implementation [2]_
to achieve stability, support only reverse (allows yielding instead of
returning a list), and remove the `nbunch` argument (had no use for it).
See also
--------
is_directed_acyclic_graph
References
----------
.. [1] Skiena, S. S. The Algorithm Design Manual (Springer-Verlag, 1998).
http://www.amazon.com/exec/obidos/ASIN/0387948600/ref=ase_thealgorithmrepo/
.. [2] networkx on GitHub
https://github.com/networkx/networkx/blob/8358afac209c00b7feb3e81c901098852a9413b3/networkx/algorithms/dag.py#L88-L168 | [
"Return",
"a",
"list",
"of",
"nodes",
"in",
"topological",
"sort",
"order",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/graph.py#L42-L113 | train | 44,130 |
def raise_unresolved_targets(build_context, conf, unknown_seeds, seed_refs):
    """Raise a ValueError describing targets unresolved during graph parsing.

    :param build_context: Build context (kept for call-site compatibility).
    :param conf: Config used to map build modules to build-file paths.
    :param unknown_seeds: Collection of unresolved target names.
    :param seed_refs: Mapping from seed name to a seed-reference record
                      with `on_cli` / `from_default` / `dep_of` /
                      `buildenv_of` fields describing why it was needed.
    :raises ValueError: Always - with a sorted listing of the unresolved
                        targets and the reason each was referenced.
    """
    def format_target(target_name):
        """Render a target name together with its build-file location."""
        # TODO: suggest similar known target names
        build_module = split_build_module(target_name)
        return '{} (in {})'.format(target_name,
                                   conf.get_build_file_path(build_module))
    def format_unresolved(seed):
        """Render one unresolved seed with the reasons it was referenced."""
        if seed not in seed_refs:
            return seed
        seed_ref = seed_refs[seed]
        reasons = []
        if seed_ref.on_cli:
            reasons.append('seen on command line')
        if seed_ref.from_default:
            # Fix: call get_project_build_file() - the original formatted
            # the bound method itself, putting its repr in the message
            # (sibling accessor get_build_file_path is likewise a method).
            reasons.append('specified as default target in {}'
                           .format(conf.get_project_build_file()))
        if seed_ref.dep_of:
            reasons.append(
                'dependency of ' +
                ', '.join(format_target(target_name)
                          for target_name in sorted(seed_ref.dep_of)))
        if seed_ref.buildenv_of:
            reasons.append(
                'buildenv of ' +
                ', '.join(format_target(target_name)
                          for target_name in sorted(seed_ref.buildenv_of)))
        return '{} - {}'.format(seed, ', '.join(reasons))
    unresolved_str = '\n'.join(format_unresolved(target_name)
                               for target_name in sorted(unknown_seeds))
    # Pluralize "target" only when more than one seed is unresolved.
    num_target_str = '{} target'.format(len(unknown_seeds))
    if len(unknown_seeds) > 1:
        num_target_str += 's'
    raise ValueError('Could not resolve {}:\n{}'
                     .format(num_target_str, unresolved_str))
"""Raise error about unresolved targets during graph parsing."""
def format_target(target_name):
# TODO: suggest similar known target names
build_module = split_build_module(target_name)
return '{} (in {})'.format(target_name,
conf.get_build_file_path(build_module))
def format_unresolved(seed):
if seed not in seed_refs:
return seed
seed_ref = seed_refs[seed]
reasons = []
if seed_ref.on_cli:
reasons.append('seen on command line')
if seed_ref.from_default:
reasons.append('specified as default target in {}'
.format(conf.get_project_build_file))
if seed_ref.dep_of:
reasons.append(
'dependency of ' +
', '.join(format_target(target_name)
for target_name in sorted(seed_ref.dep_of)))
if seed_ref.buildenv_of:
reasons.append(
'buildenv of ' +
', '.join(format_target(target_name)
for target_name in sorted(seed_ref.buildenv_of)))
return '{} - {}'.format(seed, ', '.join(reasons))
unresolved_str = '\n'.join(format_unresolved(target_name)
for target_name in sorted(unknown_seeds))
num_target_str = '{} target'.format(len(unknown_seeds))
if len(unknown_seeds) > 1:
num_target_str += 's'
raise ValueError('Could not resolve {}:\n{}'
.format(num_target_str, unresolved_str)) | [
"def",
"raise_unresolved_targets",
"(",
"build_context",
",",
"conf",
",",
"unknown_seeds",
",",
"seed_refs",
")",
":",
"def",
"format_target",
"(",
"target_name",
")",
":",
"# TODO: suggest similar known target names",
"build_module",
"=",
"split_build_module",
"(",
"t... | Raise error about unresolved targets during graph parsing. | [
"Raise",
"error",
"about",
"unresolved",
"targets",
"during",
"graph",
"parsing",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/graph.py#L170-L208 | train | 44,131 |
def register_scm_provider(scm_name: str):
    """Create a decorator that registers an SCM provider under `scm_name`.

    :raises KeyError: (from the decorator) if `scm_name` is already taken.
    """
    def decorate(scm_class: SourceControl):
        """Decorator for registering SCM provider."""
        # Refuse duplicate registrations under the same name.
        if scm_name in ScmManager.providers:
            raise KeyError('{} already registered!'.format(scm_name))
        ScmManager.providers[scm_name] = scm_class
        # Also register the class with the SourceControl ABC.
        SourceControl.register(scm_class)
        logger.debug('Registered {0} SCM from {1.__module__}.{1.__name__}',
                     scm_name, scm_class)
        return scm_class
    return decorate
"""Return a decorator for registering a SCM provider named `scm_name`."""
def register_decorator(scm_class: SourceControl):
"""Decorator for registering SCM provider."""
if scm_name in ScmManager.providers:
raise KeyError('{} already registered!'.format(scm_name))
ScmManager.providers[scm_name] = scm_class
SourceControl.register(scm_class)
logger.debug('Registered {0} SCM from {1.__module__}.{1.__name__}',
scm_name, scm_class)
return scm_class
return register_decorator | [
"def",
"register_scm_provider",
"(",
"scm_name",
":",
"str",
")",
":",
"def",
"register_decorator",
"(",
"scm_class",
":",
"SourceControl",
")",
":",
"\"\"\"Decorator for registering SCM provider.\"\"\"",
"if",
"scm_name",
"in",
"ScmManager",
".",
"providers",
":",
"r... | Return a decorator for registering a SCM provider named `scm_name`. | [
"Return",
"a",
"decorator",
"for",
"registering",
"a",
"SCM",
"provider",
"named",
"scm_name",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/scm.py#L83-L95 | train | 44,132 |
def get_provider(cls, scm_name: str, conf) -> SourceControl:
    """Load and return an instance of the SCM provider named `scm_name`.

    :param conf: A yabt.config.Config object used to initialize the SCM
                 provider instance.
    :raises KeyError: If no SCM provider with name `scm_name` registered.
    """
    # Loading matching entry points triggers provider self-registration
    # (each module registers itself via register_scm_provider on import).
    for ep in pkg_resources.iter_entry_points('yabt.scm', scm_name):
        ep.load()
        logger.debug('Loaded SCM provider {0.name} from {0.module_name} '
                     '(dist {0.dist})', ep)
    logger.debug('Loaded {} SCM providers', len(cls.providers))
    if scm_name not in cls.providers:
        raise KeyError('Unknown SCM identifier {}'.format(scm_name))
    return cls.providers[scm_name](conf)
"""Load and return named SCM provider instance.
:param conf: A yabt.config.Config object used to initialize the SCM
provider instance.
:raises KeyError: If no SCM provider with name `scm_name` registered.
"""
for entry_point in pkg_resources.iter_entry_points('yabt.scm',
scm_name):
entry_point.load()
logger.debug('Loaded SCM provider {0.name} from {0.module_name} '
'(dist {0.dist})', entry_point)
logger.debug('Loaded {} SCM providers', len(cls.providers))
if scm_name not in cls.providers:
raise KeyError('Unknown SCM identifier {}'.format(scm_name))
return cls.providers[scm_name](conf) | [
"def",
"get_provider",
"(",
"cls",
",",
"scm_name",
":",
"str",
",",
"conf",
")",
"->",
"SourceControl",
":",
"for",
"entry_point",
"in",
"pkg_resources",
".",
"iter_entry_points",
"(",
"'yabt.scm'",
",",
"scm_name",
")",
":",
"entry_point",
".",
"load",
"("... | Load and return named SCM provider instance.
:param conf: A yabt.config.Config object used to initialize the SCM
provider instance.
:raises KeyError: If no SCM provider with name `scm_name` registered. | [
"Load",
"and",
"return",
"named",
"SCM",
"provider",
"instance",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/scm.py#L58-L74 | train | 44,133 |
def write_dot(build_context, conf: Config, out_f):
    """Write the build graph in Graphviz dot format to `out_f`.

    :param out_f: Writable file-like object receiving the dot text.
    """
    non_buildenv = get_not_buildenv_targets(build_context)
    prebuilt = get_prebuilt_targets(build_context)
    out_f.write('strict digraph {\n')
    for node in build_context.target_graph.nodes:
        # Hide buildenv-only targets unless explicitly requested.
        if not (conf.show_buildenv_deps or node in non_buildenv):
            continue
        # Prebuilt (cache-hit) targets are rendered with a grey fill.
        style_attr = ('fillcolor="grey",style=filled'
                      if node in prebuilt else '')
        color = TARGETS_COLORS.get(
            build_context.targets[node].builder_name, 'black')
        out_f.write(' "{}" [color="{}",{}];\n'.format(node, color,
                                                      style_attr))
    for u, v in build_context.target_graph.edges:
        if conf.show_buildenv_deps or (
                u in non_buildenv and v in non_buildenv):
            out_f.write(' "{}" -> "{}";\n'.format(u, v))
    out_f.write('}\n\n')
"""Write build graph in dot format to `out_f` file-like object."""
not_buildenv_targets = get_not_buildenv_targets(build_context)
prebuilt_targets = get_prebuilt_targets(build_context)
out_f.write('strict digraph {\n')
for node in build_context.target_graph.nodes:
if conf.show_buildenv_deps or node in not_buildenv_targets:
cached = node in prebuilt_targets
fillcolor = 'fillcolor="grey",style=filled' if cached else ''
color = TARGETS_COLORS.get(
build_context.targets[node].builder_name, 'black')
out_f.write(' "{}" [color="{}",{}];\n'.format(node, color,
fillcolor))
out_f.writelines(' "{}" -> "{}";\n'.format(u, v)
for u, v in build_context.target_graph.edges
if conf.show_buildenv_deps or
(u in not_buildenv_targets and v in not_buildenv_targets))
out_f.write('}\n\n') | [
"def",
"write_dot",
"(",
"build_context",
",",
"conf",
":",
"Config",
",",
"out_f",
")",
":",
"not_buildenv_targets",
"=",
"get_not_buildenv_targets",
"(",
"build_context",
")",
"prebuilt_targets",
"=",
"get_prebuilt_targets",
"(",
"build_context",
")",
"out_f",
"."... | Write build graph in dot format to `out_f` file-like object. | [
"Write",
"build",
"graph",
"in",
"dot",
"format",
"to",
"out_f",
"file",
"-",
"like",
"object",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/dot.py#L62-L79 | train | 44,134 |
def get_workspace(self, *parts) -> str:
    """Return the path of a private workspace directory, creating it.

    Each string in `parts` becomes one directory level (sanitized via
    `get_safe_path`) under the workspace root.  On return the innermost
    directory and all intermediates exist.
    """
    safe_parts = (get_safe_path(part) for part in parts)
    workspace_dir = os.path.join(self.conf.get_workspace_path(),
                                 *safe_parts)
    if not os.path.isdir(workspace_dir):
        # exist_ok guards against a concurrent creator of the same dir.
        os.makedirs(workspace_dir, exist_ok=True)
    return workspace_dir
"""Return a path to a private workspace dir.
Create sub-tree of dirs using strings from `parts` inside workspace,
and return full path to innermost directory.
Upon returning successfully, the directory will exist (potentially
changed to a safe FS name), even if it didn't exist before, including
any intermediate parent directories.
"""
workspace_dir = os.path.join(self.conf.get_workspace_path(),
*(get_safe_path(part) for part in parts))
if not os.path.isdir(workspace_dir):
# exist_ok=True in case of concurrent creation of the same dir
os.makedirs(workspace_dir, exist_ok=True)
return workspace_dir | [
"def",
"get_workspace",
"(",
"self",
",",
"*",
"parts",
")",
"->",
"str",
":",
"workspace_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"conf",
".",
"get_workspace_path",
"(",
")",
",",
"*",
"(",
"get_safe_path",
"(",
"part",
")",
"for... | Return a path to a private workspace dir.
Create sub-tree of dirs using strings from `parts` inside workspace,
and return full path to innermost directory.
Upon returning successfully, the directory will exist (potentially
changed to a safe FS name), even if it didn't exist before, including
any intermediate parent directories. | [
"Return",
"a",
"path",
"to",
"a",
"private",
"workspace",
"dir",
".",
"Create",
"sub",
"-",
"tree",
"of",
"dirs",
"using",
"strings",
"from",
"parts",
"inside",
"workspace",
"and",
"return",
"full",
"path",
"to",
"innermost",
"directory",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L101-L115 | train | 44,135 |
def get_bin_dir(self, build_module: str) -> str:
    """Return the binaries directory for `build_module`, creating it.

    Missing intermediate directories are created as needed; on return
    the full path exists.
    """
    # makedirs with exist_ok=True is safe for concurrent creators and
    # a no-op when the directory already exists.
    target_dir = os.path.join(self.conf.get_bin_path(), build_module)
    os.makedirs(target_dir, exist_ok=True)
    return target_dir
"""Return a path to the binaries dir for a build module dir.
Create sub-tree of missing dirs as needed, and return full path
to innermost directory.
"""
bin_dir = os.path.join(self.conf.get_bin_path(), build_module)
if not os.path.isdir(bin_dir):
# exist_ok=True in case of concurrent creation of the same dir
os.makedirs(bin_dir, exist_ok=True)
return bin_dir | [
"def",
"get_bin_dir",
"(",
"self",
",",
"build_module",
":",
"str",
")",
"->",
"str",
":",
"bin_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"conf",
".",
"get_bin_path",
"(",
")",
",",
"build_module",
")",
"if",
"not",
"os",
".",
"p... | Return a path to the binaries dir for a build module dir.
Create sub-tree of missing dirs as needed, and return full path
to innermost directory. | [
"Return",
"a",
"path",
"to",
"the",
"binaries",
"dir",
"for",
"a",
"build",
"module",
"dir",
".",
"Create",
"sub",
"-",
"tree",
"of",
"missing",
"dirs",
"as",
"needed",
"and",
"return",
"full",
"path",
"to",
"innermost",
"directory",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L117-L126 | train | 44,136 |
def walk_target_deps_topological_order(self, target: Target):
    """Generate every dependency of `target` in topological sort order."""
    dep_names = get_descendants(self.target_graph, target.name)
    # Walk the whole graph in topological order, emitting only the
    # targets that `target` (transitively) depends on.
    yield from (self.targets[name]
                for name in topological_sort(self.target_graph)
                if name in dep_names)
"""Generate all dependencies of `target` by topological sort order."""
all_deps = get_descendants(self.target_graph, target.name)
for dep_name in topological_sort(self.target_graph):
if dep_name in all_deps:
yield self.targets[dep_name] | [
"def",
"walk_target_deps_topological_order",
"(",
"self",
",",
"target",
":",
"Target",
")",
":",
"all_deps",
"=",
"get_descendants",
"(",
"self",
".",
"target_graph",
",",
"target",
".",
"name",
")",
"for",
"dep_name",
"in",
"topological_sort",
"(",
"self",
"... | Generate all dependencies of `target` by topological sort order. | [
"Generate",
"all",
"dependencies",
"of",
"target",
"by",
"topological",
"sort",
"order",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L128-L133 | train | 44,137 |
def generate_direct_deps(self, target: Target):
    """Generate the direct (first-level) dependencies of `target`."""
    # Sorted for deterministic iteration order.
    for dep_name in sorted(target.deps):
        yield self.targets[dep_name]
"""Generate only direct dependencies of `target`."""
yield from (self.targets[dep_name] for dep_name in sorted(target.deps)) | [
"def",
"generate_direct_deps",
"(",
"self",
",",
"target",
":",
"Target",
")",
":",
"yield",
"from",
"(",
"self",
".",
"targets",
"[",
"dep_name",
"]",
"for",
"dep_name",
"in",
"sorted",
"(",
"target",
".",
"deps",
")",
")"
] | Generate only direct dependencies of `target`. | [
"Generate",
"only",
"direct",
"dependencies",
"of",
"target",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L135-L137 | train | 44,138 |
def register_target(self, target: Target):
    """Add `target` to this build context's target maps.

    A registered target is saved in the `targets` map and in the
    `targets_by_module` map, but is not added to the target graph until
    target extraction is completed (thread safety considerations).

    :raises NameError: If a target with the same name already exists.
    """
    if target.name in self.targets:
        existing = self.targets[target.name]
        raise NameError(
            'Target with name "{0.name}" ({0.builder_name} from module '
            '"{1}") already exists - defined first as '
            '{2.builder_name} in module "{3}"'.format(
                target, split_build_module(target.name),
                existing, split_build_module(existing.name)))
    self.targets[target.name] = target
    module_name = split_build_module(target.name)
    self.targets_by_module[module_name].add(target.name)
"""Register a `target` instance in this build context.
A registered target is saved in the `targets` map and in the
`targets_by_module` map, but is not added to the target graph until
target extraction is completed (thread safety considerations).
"""
if target.name in self.targets:
first = self.targets[target.name]
raise NameError(
'Target with name "{0.name}" ({0.builder_name} from module '
'"{1}") already exists - defined first as '
'{2.builder_name} in module "{3}"'.format(
target, split_build_module(target.name),
first, split_build_module(first.name)))
self.targets[target.name] = target
self.targets_by_module[split_build_module(target.name)].add(
target.name) | [
"def",
"register_target",
"(",
"self",
",",
"target",
":",
"Target",
")",
":",
"if",
"target",
".",
"name",
"in",
"self",
".",
"targets",
":",
"first",
"=",
"self",
".",
"targets",
"[",
"target",
".",
"name",
"]",
"raise",
"NameError",
"(",
"'Target wi... | Register a `target` instance in this build context.
A registered target is saved in the `targets` map and in the
`targets_by_module` map, but is not added to the target graph until
target extraction is completed (thread safety considerations). | [
"Register",
"a",
"target",
"instance",
"in",
"this",
"build",
"context",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L148-L165 | train | 44,139 |
def get_target_extraction_context(self, build_file_path: str) -> dict:
    """Return a build-file-specific target extraction context.

    The context maps every registered builder name to an extraction
    function bound to `build_file_path` and this build context, for use
    by the build file parser.
    """
    return {
        name: extractor(name, builder, build_file_path, self)
        for name, builder in Plugin.builders.items()
    }
"""Return a build file parser target extraction context.
The target extraction context is a build-file-specific mapping from
builder-name to target extraction function,
for every registered builder.
"""
extraction_context = {}
for name, builder in Plugin.builders.items():
extraction_context[name] = extractor(name, builder,
build_file_path, self)
return extraction_context | [
"def",
"get_target_extraction_context",
"(",
"self",
",",
"build_file_path",
":",
"str",
")",
"->",
"dict",
":",
"extraction_context",
"=",
"{",
"}",
"for",
"name",
",",
"builder",
"in",
"Plugin",
".",
"builders",
".",
"items",
"(",
")",
":",
"extraction_con... | Return a build file parser target extraction context.
The target extraction context is a build-file-specific mapping from
builder-name to target extraction function,
for every registered builder. | [
"Return",
"a",
"build",
"file",
"parser",
"target",
"extraction",
"context",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L183-L194 | train | 44,140 |
def get_buildenv_graph(self):
    """Return a graph induced by buildenv nodes"""
    # Collect every target used as a buildenv, then the union of those
    # nodes with everything they (transitively) depend on, and return
    # the subgraph induced by that union.  This can be very non-optimal.
    # TODO(itamar): Reimplement efficient algo, or redesign buildenvs
    buildenvs = {target.buildenv for target in self.targets.values()
                 if target.buildenv}
    induced_nodes = set(buildenvs)
    for buildenv in buildenvs:
        induced_nodes |= set(get_descendants(self.target_graph, buildenv))
    return nx.DiGraph(self.target_graph.subgraph(induced_nodes))
"""Return a graph induced by buildenv nodes"""
# This implementation first obtains all subsets of nodes that all
# buildenvs depend on, and then builds a subgraph induced by the union
# of these subsets. This can be very non-optimal.
# TODO(itamar): Reimplement efficient algo, or redesign buildenvs
buildenvs = set(target.buildenv for target in self.targets.values()
if target.buildenv)
return nx.DiGraph(self.target_graph.subgraph(reduce(
lambda x, y: x | set(y),
(get_descendants(self.target_graph, buildenv)
for buildenv in buildenvs), buildenvs))) | [
"def",
"get_buildenv_graph",
"(",
"self",
")",
":",
"# This implementation first obtains all subsets of nodes that all",
"# buildenvs depend on, and then builds a subgraph induced by the union",
"# of these subsets. This can be very non-optimal.",
"# TODO(itamar): Reimplement efficient algo, or re... | Return a graph induced by buildenv nodes | [
"Return",
"a",
"graph",
"induced",
"by",
"buildenv",
"nodes"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L200-L211 | train | 44,141 |
def ready_nodes_iter(self, graph_copy):
    """Generate ready targets from the graph `graph_copy`.
    The input graph is mutated by this method, so it has to be a mutable
    copy of the graph (e.g. not original copy, or read-only view).
    Caller **must** call `done()` after processing every generated
    target, so additional ready targets can be added to the queue.
    The invariant: a target may be yielded from this generator only
    after all its descendant targets were notified "done".
    """
    def is_ready(target_name):
        """Return True if the node `target_name` is "ready" in the graph
        `graph_copy`.
        "Ready" means that the graph doesn't contain any more nodes that
        `target_name` depends on (e.g. it has no successors).
        """
        try:
            next(graph_copy.successors(target_name))
        except StopIteration:
            return True
        return False
    # Seed the queue with the nodes that are ready from the start;
    # sorted for deterministic iteration order.
    ready_nodes = deque(sorted(
        target_name for target_name in graph_copy.nodes
        if is_ready(target_name)))
    # Events set by the callbacks below (possibly from worker threads):
    # `produced_event` - the ready queue may have grown;
    # `failed_event` - a target failed and iteration should stop.
    produced_event = threading.Event()
    failed_event = threading.Event()
    def make_done_callback(target: Target):
        """Return a callable "done" notifier to
        report a target as processed."""
        def done_notifier():
            """Mark target as done, adding new ready nodes to queue"""
            if graph_copy.has_node(target.name):
                affected_nodes = list(sorted(
                    graph_copy.predecessors(target.name)))
                graph_copy.remove_node(target.name)
                ready_nodes.extend(
                    target_name for target_name in affected_nodes
                    if is_ready(target_name))
            produced_event.set()
        return done_notifier
    def make_retry_callback(target: Target):
        """Return a callable "retry" notifier to
        report a target as in need of retry.
        Currently for tests we rebuild the target
        when it's not necessary."""
        def retry_notifier():
            """Mark target as retry, re-entering node to end of queue"""
            if graph_copy.has_node(target.name):
                ready_nodes.append(target.name)
            produced_event.set()
        return retry_notifier
    def make_fail_callback(target: Target):
        """Return a callable "fail" notifier to
        report a target as failed after all retries."""
        def fail_notifier(ex):
            """Mark target as failed, taking it and ancestors
            out of the queue"""
            # TODO(Dana) separate "failed to build target" errors from
            # "failed to run" errors.
            # see: https://github.com/resonai/ybt/issues/124
            if isinstance(ex, CalledProcessError):
                sys.stdout.write(ex.stdout.decode('utf-8'))
                sys.stderr.write(ex.stderr.decode('utf-8'))
            if graph_copy.has_node(target.name):
                self.failed_nodes[target.name] = ex
                # removing all ancestors (nodes that depend on this one)
                affected_nodes = get_ancestors(graph_copy, target.name)
                graph_copy.remove_node(target.name)
                for affected_node in affected_nodes:
                    if affected_node in self.skipped_nodes:
                        continue
                    if graph_copy.has_node(affected_node):
                        self.skipped_nodes.append(affected_node)
                        graph_copy.remove_node(affected_node)
            if self.conf.continue_after_fail:
                logger.info('Failed target: {} due to error: {}',
                            target.name, ex)
                produced_event.set()
            else:
                failed_event.set()
                fatal('`{}\': {}', target.name, ex)
        return fail_notifier
    while True:
        # Block (polling every 0.5s) until a node becomes ready, the
        # graph is exhausted, or a failure aborts the iteration.
        while len(ready_nodes) == 0:
            if graph_copy.order() == 0:
                return
            if failed_event.is_set():
                return
            produced_event.wait(0.5)
        produced_event.clear()
        next_node = ready_nodes.popleft()
        node = self.targets[next_node]
        # Attach per-target notifiers the processing code must invoke.
        node.done = make_done_callback(node)
        # TODO(bergden) retry assumes no need to update predecessors:
        # This means we don't support retries for targets that are
        # prerequisites of other targets (builds, installs)
        node.retry = make_retry_callback(node)
        node.fail = make_fail_callback(node)
        yield node
"""Generate ready targets from the graph `graph_copy`.
The input graph is mutated by this method, so it has to be a mutable
copy of the graph (e.g. not original copy, or read-only view).
Caller **must** call `done()` after processing every generated
target, so additional ready targets can be added to the queue.
The invariant: a target may be yielded from this generator only
after all its descendant targets were notified "done".
"""
def is_ready(target_name):
"""Return True if the node `target_name` is "ready" in the graph
`graph_copy`.
"Ready" means that the graph doesn't contain any more nodes that
`target_name` depends on (e.g. it has no successors).
"""
try:
next(graph_copy.successors(target_name))
except StopIteration:
return True
return False
ready_nodes = deque(sorted(
target_name for target_name in graph_copy.nodes
if is_ready(target_name)))
produced_event = threading.Event()
failed_event = threading.Event()
def make_done_callback(target: Target):
"""Return a callable "done" notifier to
report a target as processed."""
def done_notifier():
"""Mark target as done, adding new ready nodes to queue"""
if graph_copy.has_node(target.name):
affected_nodes = list(sorted(
graph_copy.predecessors(target.name)))
graph_copy.remove_node(target.name)
ready_nodes.extend(
target_name for target_name in affected_nodes
if is_ready(target_name))
produced_event.set()
return done_notifier
def make_retry_callback(target: Target):
"""Return a callable "retry" notifier to
report a target as in need of retry.
Currently for tests we rebuild the target
when it's not necessary."""
def retry_notifier():
"""Mark target as retry, re-entering node to end of queue"""
if graph_copy.has_node(target.name):
ready_nodes.append(target.name)
produced_event.set()
return retry_notifier
def make_fail_callback(target: Target):
"""Return a callable "fail" notifier to
report a target as failed after all retries."""
def fail_notifier(ex):
"""Mark target as failed, taking it and ancestors
out of the queue"""
# TODO(Dana) separate "failed to build target" errors from
# "failed to run" errors.
# see: https://github.com/resonai/ybt/issues/124
if isinstance(ex, CalledProcessError):
sys.stdout.write(ex.stdout.decode('utf-8'))
sys.stderr.write(ex.stderr.decode('utf-8'))
if graph_copy.has_node(target.name):
self.failed_nodes[target.name] = ex
# removing all ancestors (nodes that depend on this one)
affected_nodes = get_ancestors(graph_copy, target.name)
graph_copy.remove_node(target.name)
for affected_node in affected_nodes:
if affected_node in self.skipped_nodes:
continue
if graph_copy.has_node(affected_node):
self.skipped_nodes.append(affected_node)
graph_copy.remove_node(affected_node)
if self.conf.continue_after_fail:
logger.info('Failed target: {} due to error: {}',
target.name, ex)
produced_event.set()
else:
failed_event.set()
fatal('`{}\': {}', target.name, ex)
return fail_notifier
while True:
while len(ready_nodes) == 0:
if graph_copy.order() == 0:
return
if failed_event.is_set():
return
produced_event.wait(0.5)
produced_event.clear()
next_node = ready_nodes.popleft()
node = self.targets[next_node]
node.done = make_done_callback(node)
# TODO(bergden) retry assumes no need to update predecessors:
# This means we don't support retries for targets that are
# prerequisites of other targets (builds, installs)
node.retry = make_retry_callback(node)
node.fail = make_fail_callback(node)
yield node | [
"def",
"ready_nodes_iter",
"(",
"self",
",",
"graph_copy",
")",
":",
"def",
"is_ready",
"(",
"target_name",
")",
":",
"\"\"\"Return True if the node `target_name` is \"ready\" in the graph\n `graph_copy`.\n\n \"Ready\" means that the graph doesn't contain any mor... | Generate ready targets from the graph `graph_copy`.
The input graph is mutated by this method, so it has to be a mutable
copy of the graph (e.g. not original copy, or read-only view).
Caller **must** call `done()` after processing every generated
target, so additional ready targets can be added to the queue.
The invariant: a target may be yielded from this generator only
after all its descendant targets were notified "done". | [
"Generate",
"ready",
"targets",
"from",
"the",
"graph",
"graph_copy",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L213-L326 | train | 44,142 |
resonai/ybt | yabt/buildcontext.py | BuildContext.run_in_buildenv | def run_in_buildenv(
self, buildenv_target_name: str, cmd: list, cmd_env: dict=None,
work_dir: str=None, auto_uid: bool=True, runtime: str=None,
**kwargs):
"""Run a command in a named BuildEnv Docker image.
:param buildenv_target_name: A named Docker image target in which the
command should be run.
:param cmd: The command to run, as you'd pass to subprocess.run()
:param cmd_env: A dictionary of environment variables for the command.
:param work_dir: A different work dir to run in.
Either absolute path, or relative to project root.
:param auto_uid: Whether to run as the active uid:gid, or as root.
:param kwargs: Extra keyword arguments that are passed to the
subprocess.run() call that runs the BuildEnv container
(for, e.g. timeout arg, stdout/err redirection, etc.)
:raises KeyError: If named BuildEnv is not a registered BuildEnv image
"""
buildenv_target = self.targets[buildenv_target_name]
# TODO(itamar): Assert that buildenv_target is up to date
redirection = any(
stream_key in kwargs
for stream_key in ('stdin', 'stdout', 'stderr', 'input'))
docker_run = ['docker', 'run']
# if not self.conf.non_interactive:
# docker_run.append('-i')
if not redirection:
docker_run.append('-t')
project_vol = (self.conf.docker_volume if self.conf.docker_volume else
self.conf.project_root)
container_work_dir = PurePath('/project')
if work_dir:
container_work_dir /= work_dir
if runtime:
docker_run.extend([
'--runtime', runtime,
])
docker_run.extend([
'--rm',
'-v', project_vol + ':/project',
# TODO: windows containers?
'-w', container_work_dir.as_posix(),
])
if cmd_env:
for key, value in cmd_env.items():
# TODO(itamar): escaping
docker_run.extend(['-e', '{}={}'.format(key, value)])
if platform.system() == 'Linux' and auto_uid:
# Fix permissions for bind-mounted project dir
# The fix is not needed when using Docker For Mac / Windows,
# because it is somehow taken care of by the sharing mechanics
docker_run.extend([
'-u', '{}:{}'.format(os.getuid(), os.getgid()),
'-v', '/etc/shadow:/etc/shadow:ro',
'-v', '/etc/group:/etc/group:ro',
'-v', '/etc/passwd:/etc/passwd:ro',
'-v', '/etc/sudoers:/etc/sudoers:ro',
])
docker_run.append(format_qualified_image_name(buildenv_target))
docker_run.extend(cmd)
logger.info('Running command in build env "{}" using command {}',
buildenv_target_name, docker_run)
# TODO: Consider changing the PIPEs to temp files.
if 'stderr' not in kwargs:
kwargs['stderr'] = PIPE
if 'stdout' not in kwargs:
kwargs['stdout'] = PIPE
result = run(docker_run, check=True, **kwargs)
# TODO(Dana): Understand what is the right enconding and remove the
# try except
if kwargs['stdout'] is PIPE:
try:
sys.stdout.write(result.stdout.decode('utf-8'))
except UnicodeEncodeError as e:
sys.stderr.write('tried writing the stdout of {},\n but it '
'has a problematic character:\n {}\n'
'hex dump of stdout:\n{}\n'
.format(docker_run, str(e), codecs.encode(
result.stdout, 'hex').decode('utf8')))
if kwargs['stderr'] is PIPE:
try:
sys.stderr.write(result.stderr.decode('utf-8'))
except UnicodeEncodeError as e:
sys.stderr.write('tried writing the stderr of {},\n but it '
'has a problematic character:\n {}\n'
'hex dump of stderr:\n{}\n'
.format(docker_run, str(e), codecs.encode(
result.stderr, 'hex').decode('utf8')))
return result | python | def run_in_buildenv(
self, buildenv_target_name: str, cmd: list, cmd_env: dict=None,
work_dir: str=None, auto_uid: bool=True, runtime: str=None,
**kwargs):
"""Run a command in a named BuildEnv Docker image.
:param buildenv_target_name: A named Docker image target in which the
command should be run.
:param cmd: The command to run, as you'd pass to subprocess.run()
:param cmd_env: A dictionary of environment variables for the command.
:param work_dir: A different work dir to run in.
Either absolute path, or relative to project root.
:param auto_uid: Whether to run as the active uid:gid, or as root.
:param kwargs: Extra keyword arguments that are passed to the
subprocess.run() call that runs the BuildEnv container
(for, e.g. timeout arg, stdout/err redirection, etc.)
:raises KeyError: If named BuildEnv is not a registered BuildEnv image
"""
buildenv_target = self.targets[buildenv_target_name]
# TODO(itamar): Assert that buildenv_target is up to date
redirection = any(
stream_key in kwargs
for stream_key in ('stdin', 'stdout', 'stderr', 'input'))
docker_run = ['docker', 'run']
# if not self.conf.non_interactive:
# docker_run.append('-i')
if not redirection:
docker_run.append('-t')
project_vol = (self.conf.docker_volume if self.conf.docker_volume else
self.conf.project_root)
container_work_dir = PurePath('/project')
if work_dir:
container_work_dir /= work_dir
if runtime:
docker_run.extend([
'--runtime', runtime,
])
docker_run.extend([
'--rm',
'-v', project_vol + ':/project',
# TODO: windows containers?
'-w', container_work_dir.as_posix(),
])
if cmd_env:
for key, value in cmd_env.items():
# TODO(itamar): escaping
docker_run.extend(['-e', '{}={}'.format(key, value)])
if platform.system() == 'Linux' and auto_uid:
# Fix permissions for bind-mounted project dir
# The fix is not needed when using Docker For Mac / Windows,
# because it is somehow taken care of by the sharing mechanics
docker_run.extend([
'-u', '{}:{}'.format(os.getuid(), os.getgid()),
'-v', '/etc/shadow:/etc/shadow:ro',
'-v', '/etc/group:/etc/group:ro',
'-v', '/etc/passwd:/etc/passwd:ro',
'-v', '/etc/sudoers:/etc/sudoers:ro',
])
docker_run.append(format_qualified_image_name(buildenv_target))
docker_run.extend(cmd)
logger.info('Running command in build env "{}" using command {}',
buildenv_target_name, docker_run)
# TODO: Consider changing the PIPEs to temp files.
if 'stderr' not in kwargs:
kwargs['stderr'] = PIPE
if 'stdout' not in kwargs:
kwargs['stdout'] = PIPE
result = run(docker_run, check=True, **kwargs)
# TODO(Dana): Understand what is the right enconding and remove the
# try except
if kwargs['stdout'] is PIPE:
try:
sys.stdout.write(result.stdout.decode('utf-8'))
except UnicodeEncodeError as e:
sys.stderr.write('tried writing the stdout of {},\n but it '
'has a problematic character:\n {}\n'
'hex dump of stdout:\n{}\n'
.format(docker_run, str(e), codecs.encode(
result.stdout, 'hex').decode('utf8')))
if kwargs['stderr'] is PIPE:
try:
sys.stderr.write(result.stderr.decode('utf-8'))
except UnicodeEncodeError as e:
sys.stderr.write('tried writing the stderr of {},\n but it '
'has a problematic character:\n {}\n'
'hex dump of stderr:\n{}\n'
.format(docker_run, str(e), codecs.encode(
result.stderr, 'hex').decode('utf8')))
return result | [
"def",
"run_in_buildenv",
"(",
"self",
",",
"buildenv_target_name",
":",
"str",
",",
"cmd",
":",
"list",
",",
"cmd_env",
":",
"dict",
"=",
"None",
",",
"work_dir",
":",
"str",
"=",
"None",
",",
"auto_uid",
":",
"bool",
"=",
"True",
",",
"runtime",
":",... | Run a command in a named BuildEnv Docker image.
:param buildenv_target_name: A named Docker image target in which the
command should be run.
:param cmd: The command to run, as you'd pass to subprocess.run()
:param cmd_env: A dictionary of environment variables for the command.
:param work_dir: A different work dir to run in.
Either absolute path, or relative to project root.
:param auto_uid: Whether to run as the active uid:gid, or as root.
:param kwargs: Extra keyword arguments that are passed to the
subprocess.run() call that runs the BuildEnv container
(for, e.g. timeout arg, stdout/err redirection, etc.)
:raises KeyError: If named BuildEnv is not a registered BuildEnv image | [
"Run",
"a",
"command",
"in",
"a",
"named",
"BuildEnv",
"Docker",
"image",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L350-L441 | train | 44,143 |
resonai/ybt | yabt/buildcontext.py | BuildContext.build_target | def build_target(self, target: Target):
"""Invoke the builder function for a target."""
builder = Plugin.builders[target.builder_name]
if builder.func:
logger.debug('About to invoke the {} builder function for {}',
target.builder_name, target.name)
builder.func(self, target)
else:
logger.debug('Skipping {} builder function for target {} (no '
'function registered)', target.builder_name, target) | python | def build_target(self, target: Target):
"""Invoke the builder function for a target."""
builder = Plugin.builders[target.builder_name]
if builder.func:
logger.debug('About to invoke the {} builder function for {}',
target.builder_name, target.name)
builder.func(self, target)
else:
logger.debug('Skipping {} builder function for target {} (no '
'function registered)', target.builder_name, target) | [
"def",
"build_target",
"(",
"self",
",",
"target",
":",
"Target",
")",
":",
"builder",
"=",
"Plugin",
".",
"builders",
"[",
"target",
".",
"builder_name",
"]",
"if",
"builder",
".",
"func",
":",
"logger",
".",
"debug",
"(",
"'About to invoke the {} builder f... | Invoke the builder function for a target. | [
"Invoke",
"the",
"builder",
"function",
"for",
"a",
"target",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L443-L452 | train | 44,144 |
resonai/ybt | yabt/buildcontext.py | BuildContext.register_target_artifact_metadata | def register_target_artifact_metadata(self, target: str, metadata: dict):
"""Register the artifact metadata dictionary for a built target."""
with self.context_lock:
self.artifacts_metadata[target.name] = metadata | python | def register_target_artifact_metadata(self, target: str, metadata: dict):
"""Register the artifact metadata dictionary for a built target."""
with self.context_lock:
self.artifacts_metadata[target.name] = metadata | [
"def",
"register_target_artifact_metadata",
"(",
"self",
",",
"target",
":",
"str",
",",
"metadata",
":",
"dict",
")",
":",
"with",
"self",
".",
"context_lock",
":",
"self",
".",
"artifacts_metadata",
"[",
"target",
".",
"name",
"]",
"=",
"metadata"
] | Register the artifact metadata dictionary for a built target. | [
"Register",
"the",
"artifact",
"metadata",
"dictionary",
"for",
"a",
"built",
"target",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L465-L468 | train | 44,145 |
resonai/ybt | yabt/buildcontext.py | BuildContext.write_artifacts_metadata | def write_artifacts_metadata(self):
"""Write out a JSON file with all built targets artifact metadata,
if such output file is specified."""
if self.conf.artifacts_metadata_file:
logger.info('Writing artifacts metadata to file "%s"',
self.conf.artifacts_metadata_file)
with open(self.conf.artifacts_metadata_file, 'w') as fp:
json.dump(self.artifacts_metadata, fp) | python | def write_artifacts_metadata(self):
"""Write out a JSON file with all built targets artifact metadata,
if such output file is specified."""
if self.conf.artifacts_metadata_file:
logger.info('Writing artifacts metadata to file "%s"',
self.conf.artifacts_metadata_file)
with open(self.conf.artifacts_metadata_file, 'w') as fp:
json.dump(self.artifacts_metadata, fp) | [
"def",
"write_artifacts_metadata",
"(",
"self",
")",
":",
"if",
"self",
".",
"conf",
".",
"artifacts_metadata_file",
":",
"logger",
".",
"info",
"(",
"'Writing artifacts metadata to file \"%s\"'",
",",
"self",
".",
"conf",
".",
"artifacts_metadata_file",
")",
"with"... | Write out a JSON file with all built targets artifact metadata,
if such output file is specified. | [
"Write",
"out",
"a",
"JSON",
"file",
"with",
"all",
"built",
"targets",
"artifact",
"metadata",
"if",
"such",
"output",
"file",
"is",
"specified",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/buildcontext.py#L470-L477 | train | 44,146 |
resonai/ybt | yabt/config.py | Config.get_build_file_path | def get_build_file_path(self, build_module) -> str:
"""Return a full path to the build file of `build_module`.
The returned path will always be OS-native, regardless of the format
of project_root (native) and build_module (with '/').
"""
project_root = Path(self.project_root)
build_module = norm_proj_path(build_module, '')
return str(project_root / build_module /
(BUILD_PROJ_FILE if '' == build_module
else self.build_file_name)) | python | def get_build_file_path(self, build_module) -> str:
"""Return a full path to the build file of `build_module`.
The returned path will always be OS-native, regardless of the format
of project_root (native) and build_module (with '/').
"""
project_root = Path(self.project_root)
build_module = norm_proj_path(build_module, '')
return str(project_root / build_module /
(BUILD_PROJ_FILE if '' == build_module
else self.build_file_name)) | [
"def",
"get_build_file_path",
"(",
"self",
",",
"build_module",
")",
"->",
"str",
":",
"project_root",
"=",
"Path",
"(",
"self",
".",
"project_root",
")",
"build_module",
"=",
"norm_proj_path",
"(",
"build_module",
",",
"''",
")",
"return",
"str",
"(",
"proj... | Return a full path to the build file of `build_module`.
The returned path will always be OS-native, regardless of the format
of project_root (native) and build_module (with '/'). | [
"Return",
"a",
"full",
"path",
"to",
"the",
"build",
"file",
"of",
"build_module",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/config.py#L107-L117 | train | 44,147 |
resonai/ybt | yabt/builders/custom_installer.py | guess_uri_type | def guess_uri_type(uri: str, hint: str=None):
"""Return a guess for the URI type based on the URI string `uri`.
If `hint` is given, it is assumed to be the correct type.
Otherwise, the URI is inspected using urlparse, and we try to guess
whether it's a remote Git repository, a remote downloadable archive,
or a local-only data.
"""
# TODO(itamar): do this better
if hint:
return hint
norm_uri = uri.lower()
parsed_uri = urlparse(norm_uri)
if parsed_uri.path.endswith('.git'):
return 'git'
if parsed_uri.scheme in ('http', 'https'):
ext = splitext(parsed_uri.path)[-1]
if ext in KNOWN_ARCHIVES:
return 'archive'
return 'single'
return 'local' | python | def guess_uri_type(uri: str, hint: str=None):
"""Return a guess for the URI type based on the URI string `uri`.
If `hint` is given, it is assumed to be the correct type.
Otherwise, the URI is inspected using urlparse, and we try to guess
whether it's a remote Git repository, a remote downloadable archive,
or a local-only data.
"""
# TODO(itamar): do this better
if hint:
return hint
norm_uri = uri.lower()
parsed_uri = urlparse(norm_uri)
if parsed_uri.path.endswith('.git'):
return 'git'
if parsed_uri.scheme in ('http', 'https'):
ext = splitext(parsed_uri.path)[-1]
if ext in KNOWN_ARCHIVES:
return 'archive'
return 'single'
return 'local' | [
"def",
"guess_uri_type",
"(",
"uri",
":",
"str",
",",
"hint",
":",
"str",
"=",
"None",
")",
":",
"# TODO(itamar): do this better",
"if",
"hint",
":",
"return",
"hint",
"norm_uri",
"=",
"uri",
".",
"lower",
"(",
")",
"parsed_uri",
"=",
"urlparse",
"(",
"n... | Return a guess for the URI type based on the URI string `uri`.
If `hint` is given, it is assumed to be the correct type.
Otherwise, the URI is inspected using urlparse, and we try to guess
whether it's a remote Git repository, a remote downloadable archive,
or a local-only data. | [
"Return",
"a",
"guess",
"for",
"the",
"URI",
"type",
"based",
"on",
"the",
"URI",
"string",
"uri",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L69-L89 | train | 44,148 |
resonai/ybt | yabt/builders/custom_installer.py | git_handler | def git_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote Git repository URI.
Clone the repository under the private builder workspace (unless already
cloned), and add it to the package tar (filtering out git internals).
TODO(itamar): Support branches / tags / specific commit hashes
TODO(itamar): Support updating a cloned repository
TODO(itamar): Handle submodules?
TODO(itamar): Handle force pulls?
"""
target_name = split_name(target.name)
# clone the repository under a private builder workspace
repo_dir = join(package_dir, fetch.name) if fetch.name else package_dir
try:
repo = git.Repo(repo_dir)
except (InvalidGitRepositoryError, NoSuchPathError):
repo = git.Repo.clone_from(fetch.uri, repo_dir)
assert repo.working_tree_dir == repo_dir
tar.add(package_dir, arcname=target_name, filter=gitfilter) | python | def git_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote Git repository URI.
Clone the repository under the private builder workspace (unless already
cloned), and add it to the package tar (filtering out git internals).
TODO(itamar): Support branches / tags / specific commit hashes
TODO(itamar): Support updating a cloned repository
TODO(itamar): Handle submodules?
TODO(itamar): Handle force pulls?
"""
target_name = split_name(target.name)
# clone the repository under a private builder workspace
repo_dir = join(package_dir, fetch.name) if fetch.name else package_dir
try:
repo = git.Repo(repo_dir)
except (InvalidGitRepositoryError, NoSuchPathError):
repo = git.Repo.clone_from(fetch.uri, repo_dir)
assert repo.working_tree_dir == repo_dir
tar.add(package_dir, arcname=target_name, filter=gitfilter) | [
"def",
"git_handler",
"(",
"unused_build_context",
",",
"target",
",",
"fetch",
",",
"package_dir",
",",
"tar",
")",
":",
"target_name",
"=",
"split_name",
"(",
"target",
".",
"name",
")",
"# clone the repository under a private builder workspace",
"repo_dir",
"=",
... | Handle remote Git repository URI.
Clone the repository under the private builder workspace (unless already
cloned), and add it to the package tar (filtering out git internals).
TODO(itamar): Support branches / tags / specific commit hashes
TODO(itamar): Support updating a cloned repository
TODO(itamar): Handle submodules?
TODO(itamar): Handle force pulls? | [
"Handle",
"remote",
"Git",
"repository",
"URI",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L99-L118 | train | 44,149 |
resonai/ybt | yabt/builders/custom_installer.py | fetch_url | def fetch_url(url, dest, parent_to_remove_before_fetch):
"""Helper function to fetch a file from a URL."""
logger.debug('Downloading file {} from {}', dest, url)
try:
shutil.rmtree(parent_to_remove_before_fetch)
except FileNotFoundError:
pass
os.makedirs(parent_to_remove_before_fetch)
# TODO(itamar): Better downloading (multi-process-multi-threaded?)
# Consider offloading this to a "standalone app" invoked with Docker
resp = requests.get(url, stream=True)
with open(dest, 'wb') as fetch_file:
for chunk in resp.iter_content(chunk_size=32 * 1024):
fetch_file.write(chunk) | python | def fetch_url(url, dest, parent_to_remove_before_fetch):
"""Helper function to fetch a file from a URL."""
logger.debug('Downloading file {} from {}', dest, url)
try:
shutil.rmtree(parent_to_remove_before_fetch)
except FileNotFoundError:
pass
os.makedirs(parent_to_remove_before_fetch)
# TODO(itamar): Better downloading (multi-process-multi-threaded?)
# Consider offloading this to a "standalone app" invoked with Docker
resp = requests.get(url, stream=True)
with open(dest, 'wb') as fetch_file:
for chunk in resp.iter_content(chunk_size=32 * 1024):
fetch_file.write(chunk) | [
"def",
"fetch_url",
"(",
"url",
",",
"dest",
",",
"parent_to_remove_before_fetch",
")",
":",
"logger",
".",
"debug",
"(",
"'Downloading file {} from {}'",
",",
"dest",
",",
"url",
")",
"try",
":",
"shutil",
".",
"rmtree",
"(",
"parent_to_remove_before_fetch",
")... | Helper function to fetch a file from a URL. | [
"Helper",
"function",
"to",
"fetch",
"a",
"file",
"from",
"a",
"URL",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L121-L134 | train | 44,150 |
resonai/ybt | yabt/builders/custom_installer.py | archive_handler | def archive_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote downloadable archive URI.
Download the archive and cache it under the private builer workspace
(unless already downloaded), extract it, and add the content to the
package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
TODO(itamar): Support more archive formats (currently only tarballs).
"""
package_dest = join(package_dir, basename(urlparse(fetch.uri).path))
package_content_dir = join(package_dir, 'content')
extract_dir = (join(package_content_dir, fetch.name)
if fetch.name else package_content_dir)
fetch_url(fetch.uri, package_dest, package_dir)
# TODO(itamar): Avoid repetition of splitting extension here and above
# TODO(itamar): Don't use `extractall` on potentially untrsuted archives
ext = splitext(package_dest)[-1].lower()
if ext in ('.gz', '.bz2', '.tgz'):
with tarfile.open(package_dest, 'r:*') as src_tar:
src_tar.extractall(extract_dir)
elif ext in ('.zip',):
with ZipFile(package_dest, 'r') as zipf:
zipf.extractall(extract_dir)
else:
raise ValueError('Unsupported extension {}'.format(ext))
tar.add(package_content_dir, arcname=split_name(target.name)) | python | def archive_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote downloadable archive URI.
Download the archive and cache it under the private builer workspace
(unless already downloaded), extract it, and add the content to the
package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
TODO(itamar): Support more archive formats (currently only tarballs).
"""
package_dest = join(package_dir, basename(urlparse(fetch.uri).path))
package_content_dir = join(package_dir, 'content')
extract_dir = (join(package_content_dir, fetch.name)
if fetch.name else package_content_dir)
fetch_url(fetch.uri, package_dest, package_dir)
# TODO(itamar): Avoid repetition of splitting extension here and above
# TODO(itamar): Don't use `extractall` on potentially untrsuted archives
ext = splitext(package_dest)[-1].lower()
if ext in ('.gz', '.bz2', '.tgz'):
with tarfile.open(package_dest, 'r:*') as src_tar:
src_tar.extractall(extract_dir)
elif ext in ('.zip',):
with ZipFile(package_dest, 'r') as zipf:
zipf.extractall(extract_dir)
else:
raise ValueError('Unsupported extension {}'.format(ext))
tar.add(package_content_dir, arcname=split_name(target.name)) | [
"def",
"archive_handler",
"(",
"unused_build_context",
",",
"target",
",",
"fetch",
",",
"package_dir",
",",
"tar",
")",
":",
"package_dest",
"=",
"join",
"(",
"package_dir",
",",
"basename",
"(",
"urlparse",
"(",
"fetch",
".",
"uri",
")",
".",
"path",
")"... | Handle remote downloadable archive URI.
Download the archive and cache it under the private builer workspace
(unless already downloaded), extract it, and add the content to the
package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
TODO(itamar): Support more archive formats (currently only tarballs). | [
"Handle",
"remote",
"downloadable",
"archive",
"URI",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L137-L164 | train | 44,151 |
resonai/ybt | yabt/builders/custom_installer.py | fetch_file_handler | def fetch_file_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote downloadable file URI.
Download the file and cache it under the private builer workspace
(unless already downloaded), and add it to the package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
"""
dl_dir = join(package_dir, fetch.name) if fetch.name else package_dir
fetch_url(fetch.uri,
join(dl_dir, basename(urlparse(fetch.uri).path)),
dl_dir)
tar.add(package_dir, arcname=split_name(target.name)) | python | def fetch_file_handler(unused_build_context, target, fetch, package_dir, tar):
"""Handle remote downloadable file URI.
Download the file and cache it under the private builer workspace
(unless already downloaded), and add it to the package tar.
TODO(itamar): Support re-downloading if remote changed compared to local.
"""
dl_dir = join(package_dir, fetch.name) if fetch.name else package_dir
fetch_url(fetch.uri,
join(dl_dir, basename(urlparse(fetch.uri).path)),
dl_dir)
tar.add(package_dir, arcname=split_name(target.name)) | [
"def",
"fetch_file_handler",
"(",
"unused_build_context",
",",
"target",
",",
"fetch",
",",
"package_dir",
",",
"tar",
")",
":",
"dl_dir",
"=",
"join",
"(",
"package_dir",
",",
"fetch",
".",
"name",
")",
"if",
"fetch",
".",
"name",
"else",
"package_dir",
"... | Handle remote downloadable file URI.
Download the file and cache it under the private builer workspace
(unless already downloaded), and add it to the package tar.
TODO(itamar): Support re-downloading if remote changed compared to local. | [
"Handle",
"remote",
"downloadable",
"file",
"URI",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L167-L179 | train | 44,152 |
resonai/ybt | yabt/builders/custom_installer.py | get_installer_desc | def get_installer_desc(build_context, target) -> tuple:
"""Return a target_name, script_name, package_tarball tuple for `target`"""
workspace_dir = build_context.get_workspace('CustomInstaller', target.name)
target_name = split_name(target.name)
script_name = basename(target.props.script)
package_tarball = '{}.tar.gz'.format(join(workspace_dir, target_name))
return target_name, script_name, package_tarball | python | def get_installer_desc(build_context, target) -> tuple:
"""Return a target_name, script_name, package_tarball tuple for `target`"""
workspace_dir = build_context.get_workspace('CustomInstaller', target.name)
target_name = split_name(target.name)
script_name = basename(target.props.script)
package_tarball = '{}.tar.gz'.format(join(workspace_dir, target_name))
return target_name, script_name, package_tarball | [
"def",
"get_installer_desc",
"(",
"build_context",
",",
"target",
")",
"->",
"tuple",
":",
"workspace_dir",
"=",
"build_context",
".",
"get_workspace",
"(",
"'CustomInstaller'",
",",
"target",
".",
"name",
")",
"target_name",
"=",
"split_name",
"(",
"target",
".... | Return a target_name, script_name, package_tarball tuple for `target` | [
"Return",
"a",
"target_name",
"script_name",
"package_tarball",
"tuple",
"for",
"target"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/builders/custom_installer.py#L186-L192 | train | 44,153 |
resonai/ybt | yabt/caching.py | get_prebuilt_targets | def get_prebuilt_targets(build_context):
"""Return set of target names that are contained within cached base images
These targets may be considered "pre-built", and skipped during build.
"""
logger.info('Scanning for cached base images')
# deps that are part of cached based images
contained_deps = set()
# deps that are needed by images that are going to be built,
# but are not part of their base images
required_deps = set()
# mapping from target name to set of all its deps (descendants)
cached_descendants = CachedDescendants(build_context.target_graph)
for target_name, target in build_context.targets.items():
if 'image_caching_behavior' not in target.props:
continue
image_name = get_image_name(target)
image_tag = target.props.image_tag
icb = ImageCachingBehavior(image_name, image_tag,
target.props.image_caching_behavior)
target.image_id = handle_build_cache(build_context.conf, image_name,
image_tag, icb)
if target.image_id:
# mark deps of cached base image as "contained"
image_deps = cached_descendants.get(target_name)
contained_deps.update(image_deps)
contained_deps.add(target.name)
else:
# mark deps of image that is going to be built
# (and are not deps of its base image) as "required"
image_deps = cached_descendants.get(target_name)
base_image_deps = cached_descendants.get(target.props.base_image)
required_deps.update(image_deps - base_image_deps)
return contained_deps - required_deps | python | def get_prebuilt_targets(build_context):
"""Return set of target names that are contained within cached base images
These targets may be considered "pre-built", and skipped during build.
"""
logger.info('Scanning for cached base images')
# deps that are part of cached based images
contained_deps = set()
# deps that are needed by images that are going to be built,
# but are not part of their base images
required_deps = set()
# mapping from target name to set of all its deps (descendants)
cached_descendants = CachedDescendants(build_context.target_graph)
for target_name, target in build_context.targets.items():
if 'image_caching_behavior' not in target.props:
continue
image_name = get_image_name(target)
image_tag = target.props.image_tag
icb = ImageCachingBehavior(image_name, image_tag,
target.props.image_caching_behavior)
target.image_id = handle_build_cache(build_context.conf, image_name,
image_tag, icb)
if target.image_id:
# mark deps of cached base image as "contained"
image_deps = cached_descendants.get(target_name)
contained_deps.update(image_deps)
contained_deps.add(target.name)
else:
# mark deps of image that is going to be built
# (and are not deps of its base image) as "required"
image_deps = cached_descendants.get(target_name)
base_image_deps = cached_descendants.get(target.props.base_image)
required_deps.update(image_deps - base_image_deps)
return contained_deps - required_deps | [
"def",
"get_prebuilt_targets",
"(",
"build_context",
")",
":",
"logger",
".",
"info",
"(",
"'Scanning for cached base images'",
")",
"# deps that are part of cached based images",
"contained_deps",
"=",
"set",
"(",
")",
"# deps that are needed by images that are going to be built... | Return set of target names that are contained within cached base images
These targets may be considered "pre-built", and skipped during build. | [
"Return",
"set",
"of",
"target",
"names",
"that",
"are",
"contained",
"within",
"cached",
"base",
"images"
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L70-L104 | train | 44,154 |
resonai/ybt | yabt/caching.py | write_summary | def write_summary(summary: dict, cache_dir: str):
"""Write the `summary` JSON to `cache_dir`.
Updated the accessed timestamp to now before writing.
"""
# update the summary last-accessed timestamp
summary['accessed'] = time()
with open(join(cache_dir, 'summary.json'), 'w') as summary_file:
summary_file.write(json.dumps(summary, indent=4, sort_keys=True)) | python | def write_summary(summary: dict, cache_dir: str):
"""Write the `summary` JSON to `cache_dir`.
Updated the accessed timestamp to now before writing.
"""
# update the summary last-accessed timestamp
summary['accessed'] = time()
with open(join(cache_dir, 'summary.json'), 'w') as summary_file:
summary_file.write(json.dumps(summary, indent=4, sort_keys=True)) | [
"def",
"write_summary",
"(",
"summary",
":",
"dict",
",",
"cache_dir",
":",
"str",
")",
":",
"# update the summary last-accessed timestamp",
"summary",
"[",
"'accessed'",
"]",
"=",
"time",
"(",
")",
"with",
"open",
"(",
"join",
"(",
"cache_dir",
",",
"'summary... | Write the `summary` JSON to `cache_dir`.
Updated the accessed timestamp to now before writing. | [
"Write",
"the",
"summary",
"JSON",
"to",
"cache_dir",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L107-L115 | train | 44,155 |
resonai/ybt | yabt/caching.py | copy_artifact | def copy_artifact(src_path: str, artifact_hash: str, conf: Config):
"""Copy the artifact at `src_path` with hash `artifact_hash` to artifacts
cache dir.
If an artifact already exists at that location, it is assumed to be
identical (since it's based on hash), and the copy is skipped.
TODO: pruning policy to limit cache size.
"""
cache_dir = conf.get_artifacts_cache_dir()
if not isdir(cache_dir):
makedirs(cache_dir)
cached_artifact_path = join(cache_dir, artifact_hash)
if isfile(cached_artifact_path) or isdir(cached_artifact_path):
logger.debug('Skipping copy of existing cached artifact {} -> {}',
src_path, cached_artifact_path)
return
abs_src_path = join(conf.project_root, src_path)
logger.debug('Caching artifact {} under {}',
abs_src_path, cached_artifact_path)
shutil.copy(abs_src_path, cached_artifact_path) | python | def copy_artifact(src_path: str, artifact_hash: str, conf: Config):
"""Copy the artifact at `src_path` with hash `artifact_hash` to artifacts
cache dir.
If an artifact already exists at that location, it is assumed to be
identical (since it's based on hash), and the copy is skipped.
TODO: pruning policy to limit cache size.
"""
cache_dir = conf.get_artifacts_cache_dir()
if not isdir(cache_dir):
makedirs(cache_dir)
cached_artifact_path = join(cache_dir, artifact_hash)
if isfile(cached_artifact_path) or isdir(cached_artifact_path):
logger.debug('Skipping copy of existing cached artifact {} -> {}',
src_path, cached_artifact_path)
return
abs_src_path = join(conf.project_root, src_path)
logger.debug('Caching artifact {} under {}',
abs_src_path, cached_artifact_path)
shutil.copy(abs_src_path, cached_artifact_path) | [
"def",
"copy_artifact",
"(",
"src_path",
":",
"str",
",",
"artifact_hash",
":",
"str",
",",
"conf",
":",
"Config",
")",
":",
"cache_dir",
"=",
"conf",
".",
"get_artifacts_cache_dir",
"(",
")",
"if",
"not",
"isdir",
"(",
"cache_dir",
")",
":",
"makedirs",
... | Copy the artifact at `src_path` with hash `artifact_hash` to artifacts
cache dir.
If an artifact already exists at that location, it is assumed to be
identical (since it's based on hash), and the copy is skipped.
TODO: pruning policy to limit cache size. | [
"Copy",
"the",
"artifact",
"at",
"src_path",
"with",
"hash",
"artifact_hash",
"to",
"artifacts",
"cache",
"dir",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L177-L197 | train | 44,156 |
resonai/ybt | yabt/caching.py | restore_artifact | def restore_artifact(src_path: str, artifact_hash: str, conf: Config):
"""Restore the artifact whose hash is `artifact_hash` to `src_path`.
Return True if cached artifact is found, valid, and restored successfully.
Otherwise return False.
"""
cache_dir = conf.get_artifacts_cache_dir()
if not isdir(cache_dir):
return False
cached_artifact_path = join(cache_dir, artifact_hash)
if isfile(cached_artifact_path) or isdir(cached_artifact_path):
# verify cached item hash matches expected hash
actual_hash = hash_tree(cached_artifact_path)
if actual_hash != artifact_hash:
logger.warning(
'Cached artifact {} expected hash {} != actual hash {}',
src_path, artifact_hash, actual_hash)
rmnode(cached_artifact_path)
return False
# if something exists in src_path, check if it matches the cached item
abs_src_path = join(conf.project_root, src_path)
if isfile(abs_src_path) or isdir(abs_src_path):
existing_hash = hash_tree(src_path)
if existing_hash == artifact_hash:
logger.debug('Existing artifact {} matches cached hash {}',
src_path, artifact_hash)
return True
logger.debug('Replacing existing artifact {} with cached one',
src_path)
rmnode(abs_src_path)
logger.debug('Restoring cached artifact {} to {}',
artifact_hash, src_path)
shutil.copy(cached_artifact_path, abs_src_path)
return True
logger.debug('No cached artifact for {} with hash {}',
src_path, artifact_hash)
return False | python | def restore_artifact(src_path: str, artifact_hash: str, conf: Config):
"""Restore the artifact whose hash is `artifact_hash` to `src_path`.
Return True if cached artifact is found, valid, and restored successfully.
Otherwise return False.
"""
cache_dir = conf.get_artifacts_cache_dir()
if not isdir(cache_dir):
return False
cached_artifact_path = join(cache_dir, artifact_hash)
if isfile(cached_artifact_path) or isdir(cached_artifact_path):
# verify cached item hash matches expected hash
actual_hash = hash_tree(cached_artifact_path)
if actual_hash != artifact_hash:
logger.warning(
'Cached artifact {} expected hash {} != actual hash {}',
src_path, artifact_hash, actual_hash)
rmnode(cached_artifact_path)
return False
# if something exists in src_path, check if it matches the cached item
abs_src_path = join(conf.project_root, src_path)
if isfile(abs_src_path) or isdir(abs_src_path):
existing_hash = hash_tree(src_path)
if existing_hash == artifact_hash:
logger.debug('Existing artifact {} matches cached hash {}',
src_path, artifact_hash)
return True
logger.debug('Replacing existing artifact {} with cached one',
src_path)
rmnode(abs_src_path)
logger.debug('Restoring cached artifact {} to {}',
artifact_hash, src_path)
shutil.copy(cached_artifact_path, abs_src_path)
return True
logger.debug('No cached artifact for {} with hash {}',
src_path, artifact_hash)
return False | [
"def",
"restore_artifact",
"(",
"src_path",
":",
"str",
",",
"artifact_hash",
":",
"str",
",",
"conf",
":",
"Config",
")",
":",
"cache_dir",
"=",
"conf",
".",
"get_artifacts_cache_dir",
"(",
")",
"if",
"not",
"isdir",
"(",
"cache_dir",
")",
":",
"return",
... | Restore the artifact whose hash is `artifact_hash` to `src_path`.
Return True if cached artifact is found, valid, and restored successfully.
Otherwise return False. | [
"Restore",
"the",
"artifact",
"whose",
"hash",
"is",
"artifact_hash",
"to",
"src_path",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L200-L236 | train | 44,157 |
resonai/ybt | yabt/caching.py | save_target_in_cache | def save_target_in_cache(target: Target, build_context):
"""Save `target` to build cache for future reuse.
The target hash is used to determine its cache location,
where the target metadata and artifacts metadata are seriazlied to JSON.
In addition, relevant artifacts produced by the target are copied under
the artifacts cache dir by their content hash.
TODO: pruning policy to limit cache size.
"""
cache_dir = build_context.conf.get_cache_dir(target, build_context)
if isdir(cache_dir):
rmtree(cache_dir)
makedirs(cache_dir)
logger.debug('Saving target metadata in cache under {}', cache_dir)
# write target metadata
with open(join(cache_dir, 'target.json'), 'w') as meta_file:
meta_file.write(target.json(build_context))
# copy artifacts to artifact cache by hash
artifacts = target.artifacts.get_all()
artifact_hashes = {}
for artifact_type, artifact_map in artifacts.items():
if artifact_type in (AT.docker_image,):
continue
for dst_path, src_path in artifact_map.items():
artifact_hashes[dst_path] = hash_tree(src_path)
# not caching "app" artifacts, since they're part
# of the source tree
if artifact_type not in _NO_CACHE_TYPES:
copy_artifact(src_path, artifact_hashes[dst_path],
build_context.conf)
# serialize target artifacts metadata + hashes
artifacts_desc = {
artifact_type.name:
[{'dst': dst_path, 'src': src_path,
'hash': artifact_hashes.get(dst_path)}
for dst_path, src_path in artifact_map.items()]
for artifact_type, artifact_map in artifacts.items()
}
with open(join(cache_dir, 'artifacts.json'), 'w') as artifacts_meta_file:
artifacts_meta_file.write(json.dumps(artifacts_desc, indent=4,
sort_keys=True))
# copying the summary dict so I can modify it without mutating the target
summary = dict(target.summary)
summary['name'] = target.name
summary['artifacts_hash'] = hash_tree(join(cache_dir, 'artifacts.json'))
if summary.get('created') is None:
summary['created'] = time()
write_summary(summary, cache_dir) | python | def save_target_in_cache(target: Target, build_context):
"""Save `target` to build cache for future reuse.
The target hash is used to determine its cache location,
where the target metadata and artifacts metadata are seriazlied to JSON.
In addition, relevant artifacts produced by the target are copied under
the artifacts cache dir by their content hash.
TODO: pruning policy to limit cache size.
"""
cache_dir = build_context.conf.get_cache_dir(target, build_context)
if isdir(cache_dir):
rmtree(cache_dir)
makedirs(cache_dir)
logger.debug('Saving target metadata in cache under {}', cache_dir)
# write target metadata
with open(join(cache_dir, 'target.json'), 'w') as meta_file:
meta_file.write(target.json(build_context))
# copy artifacts to artifact cache by hash
artifacts = target.artifacts.get_all()
artifact_hashes = {}
for artifact_type, artifact_map in artifacts.items():
if artifact_type in (AT.docker_image,):
continue
for dst_path, src_path in artifact_map.items():
artifact_hashes[dst_path] = hash_tree(src_path)
# not caching "app" artifacts, since they're part
# of the source tree
if artifact_type not in _NO_CACHE_TYPES:
copy_artifact(src_path, artifact_hashes[dst_path],
build_context.conf)
# serialize target artifacts metadata + hashes
artifacts_desc = {
artifact_type.name:
[{'dst': dst_path, 'src': src_path,
'hash': artifact_hashes.get(dst_path)}
for dst_path, src_path in artifact_map.items()]
for artifact_type, artifact_map in artifacts.items()
}
with open(join(cache_dir, 'artifacts.json'), 'w') as artifacts_meta_file:
artifacts_meta_file.write(json.dumps(artifacts_desc, indent=4,
sort_keys=True))
# copying the summary dict so I can modify it without mutating the target
summary = dict(target.summary)
summary['name'] = target.name
summary['artifacts_hash'] = hash_tree(join(cache_dir, 'artifacts.json'))
if summary.get('created') is None:
summary['created'] = time()
write_summary(summary, cache_dir) | [
"def",
"save_target_in_cache",
"(",
"target",
":",
"Target",
",",
"build_context",
")",
":",
"cache_dir",
"=",
"build_context",
".",
"conf",
".",
"get_cache_dir",
"(",
"target",
",",
"build_context",
")",
"if",
"isdir",
"(",
"cache_dir",
")",
":",
"rmtree",
... | Save `target` to build cache for future reuse.
The target hash is used to determine its cache location,
where the target metadata and artifacts metadata are seriazlied to JSON.
In addition, relevant artifacts produced by the target are copied under
the artifacts cache dir by their content hash.
TODO: pruning policy to limit cache size. | [
"Save",
"target",
"to",
"build",
"cache",
"for",
"future",
"reuse",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L239-L287 | train | 44,158 |
resonai/ybt | yabt/caching.py | CachedDescendants.get | def get(self, key):
"""Return set of descendants of node named `key` in `target_graph`.
Returns from cached dict if exists, otherwise compute over the graph
and cache results in the dict.
"""
if key not in self:
self[key] = set(get_descendants(self._target_graph, key))
return self[key] | python | def get(self, key):
"""Return set of descendants of node named `key` in `target_graph`.
Returns from cached dict if exists, otherwise compute over the graph
and cache results in the dict.
"""
if key not in self:
self[key] = set(get_descendants(self._target_graph, key))
return self[key] | [
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"not",
"in",
"self",
":",
"self",
"[",
"key",
"]",
"=",
"set",
"(",
"get_descendants",
"(",
"self",
".",
"_target_graph",
",",
"key",
")",
")",
"return",
"self",
"[",
"key",
"]"
] | Return set of descendants of node named `key` in `target_graph`.
Returns from cached dict if exists, otherwise compute over the graph
and cache results in the dict. | [
"Return",
"set",
"of",
"descendants",
"of",
"node",
"named",
"key",
"in",
"target_graph",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/caching.py#L59-L67 | train | 44,159 |
resonai/ybt | yabt/utils.py | fatal | def fatal(msg, *args, **kwargs):
"""Print a red `msg` to STDERR and exit.
To be used in a context of an exception, also prints out the exception.
The message is formatted with `args` & `kwargs`.
"""
exc_str = format_exc()
if exc_str.strip() != 'NoneType: None':
logger.info('{}', format_exc())
fatal_noexc(msg, *args, **kwargs) | python | def fatal(msg, *args, **kwargs):
"""Print a red `msg` to STDERR and exit.
To be used in a context of an exception, also prints out the exception.
The message is formatted with `args` & `kwargs`.
"""
exc_str = format_exc()
if exc_str.strip() != 'NoneType: None':
logger.info('{}', format_exc())
fatal_noexc(msg, *args, **kwargs) | [
"def",
"fatal",
"(",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"exc_str",
"=",
"format_exc",
"(",
")",
"if",
"exc_str",
".",
"strip",
"(",
")",
"!=",
"'NoneType: None'",
":",
"logger",
".",
"info",
"(",
"'{}'",
",",
"format_exc",
... | Print a red `msg` to STDERR and exit.
To be used in a context of an exception, also prints out the exception.
The message is formatted with `args` & `kwargs`. | [
"Print",
"a",
"red",
"msg",
"to",
"STDERR",
"and",
"exit",
".",
"To",
"be",
"used",
"in",
"a",
"context",
"of",
"an",
"exception",
"also",
"prints",
"out",
"the",
"exception",
".",
"The",
"message",
"is",
"formatted",
"with",
"args",
"&",
"kwargs",
"."... | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L43-L51 | train | 44,160 |
resonai/ybt | yabt/utils.py | fatal_noexc | def fatal_noexc(msg, *args, **kwargs):
"""Print a red `msg` to STDERR and exit.
The message is formatted with `args` & `kwargs`.
"""
print(Fore.RED + 'Fatal: ' + msg.format(*args, **kwargs) + Style.RESET_ALL,
file=sys.stderr)
sys.exit(1) | python | def fatal_noexc(msg, *args, **kwargs):
"""Print a red `msg` to STDERR and exit.
The message is formatted with `args` & `kwargs`.
"""
print(Fore.RED + 'Fatal: ' + msg.format(*args, **kwargs) + Style.RESET_ALL,
file=sys.stderr)
sys.exit(1) | [
"def",
"fatal_noexc",
"(",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"print",
"(",
"Fore",
".",
"RED",
"+",
"'Fatal: '",
"+",
"msg",
".",
"format",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"+",
"Style",
".",
"RESET_ALL",
... | Print a red `msg` to STDERR and exit.
The message is formatted with `args` & `kwargs`. | [
"Print",
"a",
"red",
"msg",
"to",
"STDERR",
"and",
"exit",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L54-L61 | train | 44,161 |
resonai/ybt | yabt/utils.py | rmnode | def rmnode(path: str):
"""Forcibly remove file or directory tree at `path`.
Fail silently if base dir doesn't exist."""
if isdir(path):
rmtree(path)
elif isfile(path):
os.remove(path) | python | def rmnode(path: str):
"""Forcibly remove file or directory tree at `path`.
Fail silently if base dir doesn't exist."""
if isdir(path):
rmtree(path)
elif isfile(path):
os.remove(path) | [
"def",
"rmnode",
"(",
"path",
":",
"str",
")",
":",
"if",
"isdir",
"(",
"path",
")",
":",
"rmtree",
"(",
"path",
")",
"elif",
"isfile",
"(",
"path",
")",
":",
"os",
".",
"remove",
"(",
"path",
")"
] | Forcibly remove file or directory tree at `path`.
Fail silently if base dir doesn't exist. | [
"Forcibly",
"remove",
"file",
"or",
"directory",
"tree",
"at",
"path",
".",
"Fail",
"silently",
"if",
"base",
"dir",
"doesn",
"t",
"exist",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L64-L70 | train | 44,162 |
resonai/ybt | yabt/utils.py | link_files | def link_files(files: set, workspace_src_dir: str,
common_parent: str, conf):
"""Sync the list of files and directories in `files` to destination
directory specified by `workspace_src_dir`.
"Sync" in the sense that every file given in `files` will be
hard-linked under `workspace_src_dir` after this function returns, and no
other files will exist under `workspace_src_dir`.
For directories in `files`, hard-links of contained files are
created recursively.
All paths in `files`, and the `workspace_src_dir`, must be relative
to `conf.project_root`.
If `common_parent` is given, and it is a common parent directory of all
`files`, then the `commonm_parent` part is truncated from the
sync'ed files destination path under `workspace_src_dir`.
:raises FileNotFoundError: If `files` contains files or directories
that do not exist.
:raises ValueError: If `common_parent` is given (not `None`), but is *NOT*
a common parent of all `files`.
"""
norm_dir = normpath(workspace_src_dir)
base_dir = ''
if common_parent:
common_parent = normpath(common_parent)
base_dir = commonpath(list(files) + [common_parent])
if base_dir != common_parent:
raise ValueError('{} is not the common parent of all target '
'sources and data'.format(common_parent))
logger.debug(
'Rebasing files in image relative to common parent dir {}',
base_dir)
num_linked = 0
for src in files:
abs_src = join(conf.project_root, src)
abs_dest = join(conf.project_root, workspace_src_dir,
relpath(src, base_dir))
link_node(abs_src, abs_dest, conf.builders_workspace_dir in src)
num_linked += 1
return num_linked | python | def link_files(files: set, workspace_src_dir: str,
common_parent: str, conf):
"""Sync the list of files and directories in `files` to destination
directory specified by `workspace_src_dir`.
"Sync" in the sense that every file given in `files` will be
hard-linked under `workspace_src_dir` after this function returns, and no
other files will exist under `workspace_src_dir`.
For directories in `files`, hard-links of contained files are
created recursively.
All paths in `files`, and the `workspace_src_dir`, must be relative
to `conf.project_root`.
If `common_parent` is given, and it is a common parent directory of all
`files`, then the `commonm_parent` part is truncated from the
sync'ed files destination path under `workspace_src_dir`.
:raises FileNotFoundError: If `files` contains files or directories
that do not exist.
:raises ValueError: If `common_parent` is given (not `None`), but is *NOT*
a common parent of all `files`.
"""
norm_dir = normpath(workspace_src_dir)
base_dir = ''
if common_parent:
common_parent = normpath(common_parent)
base_dir = commonpath(list(files) + [common_parent])
if base_dir != common_parent:
raise ValueError('{} is not the common parent of all target '
'sources and data'.format(common_parent))
logger.debug(
'Rebasing files in image relative to common parent dir {}',
base_dir)
num_linked = 0
for src in files:
abs_src = join(conf.project_root, src)
abs_dest = join(conf.project_root, workspace_src_dir,
relpath(src, base_dir))
link_node(abs_src, abs_dest, conf.builders_workspace_dir in src)
num_linked += 1
return num_linked | [
"def",
"link_files",
"(",
"files",
":",
"set",
",",
"workspace_src_dir",
":",
"str",
",",
"common_parent",
":",
"str",
",",
"conf",
")",
":",
"norm_dir",
"=",
"normpath",
"(",
"workspace_src_dir",
")",
"base_dir",
"=",
"''",
"if",
"common_parent",
":",
"co... | Sync the list of files and directories in `files` to destination
directory specified by `workspace_src_dir`.
"Sync" in the sense that every file given in `files` will be
hard-linked under `workspace_src_dir` after this function returns, and no
other files will exist under `workspace_src_dir`.
For directories in `files`, hard-links of contained files are
created recursively.
All paths in `files`, and the `workspace_src_dir`, must be relative
to `conf.project_root`.
If `common_parent` is given, and it is a common parent directory of all
`files`, then the `commonm_parent` part is truncated from the
sync'ed files destination path under `workspace_src_dir`.
:raises FileNotFoundError: If `files` contains files or directories
that do not exist.
:raises ValueError: If `common_parent` is given (not `None`), but is *NOT*
a common parent of all `files`. | [
"Sync",
"the",
"list",
"of",
"files",
"and",
"directories",
"in",
"files",
"to",
"destination",
"directory",
"specified",
"by",
"workspace_src_dir",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L114-L157 | train | 44,163 |
resonai/ybt | yabt/utils.py | norm_proj_path | def norm_proj_path(path, build_module):
"""Return a normalized path for the `path` observed in `build_module`.
The normalized path is "normalized" (in the `os.path.normpath` sense),
relative from the project root directory, and OS-native.
Supports making references from project root directory by prefixing the
path with "//".
:raises ValueError: If path references outside the project sandbox.
"""
if path == '//':
return ''
if path.startswith('//'):
norm = normpath(path[2:])
if norm[0] in ('.', '/', '\\'):
raise ValueError("Invalid path: `{}'".format(path))
return norm
if path.startswith('/'):
raise ValueError("Invalid path: `{}' - use '//' to start from "
"project root".format(path))
if build_module == '//':
build_module = ''
norm = normpath(join(build_module, path))
if norm.startswith('..'):
raise ValueError(
"Invalid path `{}' - must remain inside project sandbox"
.format(path))
return norm.strip('.') | python | def norm_proj_path(path, build_module):
"""Return a normalized path for the `path` observed in `build_module`.
The normalized path is "normalized" (in the `os.path.normpath` sense),
relative from the project root directory, and OS-native.
Supports making references from project root directory by prefixing the
path with "//".
:raises ValueError: If path references outside the project sandbox.
"""
if path == '//':
return ''
if path.startswith('//'):
norm = normpath(path[2:])
if norm[0] in ('.', '/', '\\'):
raise ValueError("Invalid path: `{}'".format(path))
return norm
if path.startswith('/'):
raise ValueError("Invalid path: `{}' - use '//' to start from "
"project root".format(path))
if build_module == '//':
build_module = ''
norm = normpath(join(build_module, path))
if norm.startswith('..'):
raise ValueError(
"Invalid path `{}' - must remain inside project sandbox"
.format(path))
return norm.strip('.') | [
"def",
"norm_proj_path",
"(",
"path",
",",
"build_module",
")",
":",
"if",
"path",
"==",
"'//'",
":",
"return",
"''",
"if",
"path",
".",
"startswith",
"(",
"'//'",
")",
":",
"norm",
"=",
"normpath",
"(",
"path",
"[",
"2",
":",
"]",
")",
"if",
"norm... | Return a normalized path for the `path` observed in `build_module`.
The normalized path is "normalized" (in the `os.path.normpath` sense),
relative from the project root directory, and OS-native.
Supports making references from project root directory by prefixing the
path with "//".
:raises ValueError: If path references outside the project sandbox. | [
"Return",
"a",
"normalized",
"path",
"for",
"the",
"path",
"observed",
"in",
"build_module",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L160-L191 | train | 44,164 |
resonai/ybt | yabt/utils.py | acc_hash | def acc_hash(filepath: str, hasher):
"""Accumulate content of file at `filepath` in `hasher`."""
with open(filepath, 'rb') as f:
while True:
chunk = f.read(_BUF_SIZE)
if not chunk:
break
hasher.update(chunk) | python | def acc_hash(filepath: str, hasher):
"""Accumulate content of file at `filepath` in `hasher`."""
with open(filepath, 'rb') as f:
while True:
chunk = f.read(_BUF_SIZE)
if not chunk:
break
hasher.update(chunk) | [
"def",
"acc_hash",
"(",
"filepath",
":",
"str",
",",
"hasher",
")",
":",
"with",
"open",
"(",
"filepath",
",",
"'rb'",
")",
"as",
"f",
":",
"while",
"True",
":",
"chunk",
"=",
"f",
".",
"read",
"(",
"_BUF_SIZE",
")",
"if",
"not",
"chunk",
":",
"b... | Accumulate content of file at `filepath` in `hasher`. | [
"Accumulate",
"content",
"of",
"file",
"at",
"filepath",
"in",
"hasher",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L238-L245 | train | 44,165 |
resonai/ybt | yabt/utils.py | hash_file | def hash_file(filepath: str) -> str:
"""Return the hexdigest MD5 hash of content of file at `filepath`."""
md5 = hashlib.md5()
acc_hash(filepath, md5)
return md5.hexdigest() | python | def hash_file(filepath: str) -> str:
"""Return the hexdigest MD5 hash of content of file at `filepath`."""
md5 = hashlib.md5()
acc_hash(filepath, md5)
return md5.hexdigest() | [
"def",
"hash_file",
"(",
"filepath",
":",
"str",
")",
"->",
"str",
":",
"md5",
"=",
"hashlib",
".",
"md5",
"(",
")",
"acc_hash",
"(",
"filepath",
",",
"md5",
")",
"return",
"md5",
".",
"hexdigest",
"(",
")"
] | Return the hexdigest MD5 hash of content of file at `filepath`. | [
"Return",
"the",
"hexdigest",
"MD5",
"hash",
"of",
"content",
"of",
"file",
"at",
"filepath",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L248-L252 | train | 44,166 |
resonai/ybt | yabt/utils.py | hash_tree | def hash_tree(filepath: str) -> str:
"""Return the hexdigest MD5 hash of file or directory at `filepath`.
If file - just hash file content.
If directory - walk the directory, and accumulate hashes of all the
relative paths + contents of files under the directory.
"""
if isfile(filepath):
return hash_file(filepath)
if isdir(filepath):
base_dir = filepath
md5 = hashlib.md5()
for root, dirs, files in walk(base_dir):
dirs.sort()
for fname in sorted(files):
filepath = join(root, fname)
# consistent hashing between POSIX & Windows
md5.update(relpath(filepath, base_dir)
.replace('\\', '/').encode('utf8'))
acc_hash(filepath, md5)
return md5.hexdigest()
return None | python | def hash_tree(filepath: str) -> str:
"""Return the hexdigest MD5 hash of file or directory at `filepath`.
If file - just hash file content.
If directory - walk the directory, and accumulate hashes of all the
relative paths + contents of files under the directory.
"""
if isfile(filepath):
return hash_file(filepath)
if isdir(filepath):
base_dir = filepath
md5 = hashlib.md5()
for root, dirs, files in walk(base_dir):
dirs.sort()
for fname in sorted(files):
filepath = join(root, fname)
# consistent hashing between POSIX & Windows
md5.update(relpath(filepath, base_dir)
.replace('\\', '/').encode('utf8'))
acc_hash(filepath, md5)
return md5.hexdigest()
return None | [
"def",
"hash_tree",
"(",
"filepath",
":",
"str",
")",
"->",
"str",
":",
"if",
"isfile",
"(",
"filepath",
")",
":",
"return",
"hash_file",
"(",
"filepath",
")",
"if",
"isdir",
"(",
"filepath",
")",
":",
"base_dir",
"=",
"filepath",
"md5",
"=",
"hashlib"... | Return the hexdigest MD5 hash of file or directory at `filepath`.
If file - just hash file content.
If directory - walk the directory, and accumulate hashes of all the
relative paths + contents of files under the directory. | [
"Return",
"the",
"hexdigest",
"MD5",
"hash",
"of",
"file",
"or",
"directory",
"at",
"filepath",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/utils.py#L255-L276 | train | 44,167 |
resonai/ybt | yabt/artifact.py | ArtifactStore.add | def add(self, artifact_type: ArtifactType, src_path: str,
dst_path: str=None):
"""Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type).
"""
if dst_path is None:
dst_path = src_path
other_src_path = self._artifacts[artifact_type].setdefault(
dst_path, src_path)
if src_path != other_src_path:
raise RuntimeError(
'{} artifact with dest path {} exists with different src '
'path: {} != {}'.format(artifact_type, dst_path, src_path,
other_src_path)) | python | def add(self, artifact_type: ArtifactType, src_path: str,
dst_path: str=None):
"""Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type).
"""
if dst_path is None:
dst_path = src_path
other_src_path = self._artifacts[artifact_type].setdefault(
dst_path, src_path)
if src_path != other_src_path:
raise RuntimeError(
'{} artifact with dest path {} exists with different src '
'path: {} != {}'.format(artifact_type, dst_path, src_path,
other_src_path)) | [
"def",
"add",
"(",
"self",
",",
"artifact_type",
":",
"ArtifactType",
",",
"src_path",
":",
"str",
",",
"dst_path",
":",
"str",
"=",
"None",
")",
":",
"if",
"dst_path",
"is",
"None",
":",
"dst_path",
"=",
"src_path",
"other_src_path",
"=",
"self",
".",
... | Add an artifact of type `artifact_type` at `src_path`.
`src_path` should be the path of the file relative to project root.
`dst_path`, if given, is the desired path of the artifact in dependent
targets, relative to its base path (by type). | [
"Add",
"an",
"artifact",
"of",
"type",
"artifact_type",
"at",
"src_path",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/artifact.py#L89-L105 | train | 44,168 |
resonai/ybt | yabt/artifact.py | ArtifactStore.extend | def extend(self, artifact_type: ArtifactType, src_paths: list):
"""Add all `src_paths` as artifact of type `artifact_type`."""
for src_path in src_paths:
self.add(artifact_type, src_path, src_path) | python | def extend(self, artifact_type: ArtifactType, src_paths: list):
"""Add all `src_paths` as artifact of type `artifact_type`."""
for src_path in src_paths:
self.add(artifact_type, src_path, src_path) | [
"def",
"extend",
"(",
"self",
",",
"artifact_type",
":",
"ArtifactType",
",",
"src_paths",
":",
"list",
")",
":",
"for",
"src_path",
"in",
"src_paths",
":",
"self",
".",
"add",
"(",
"artifact_type",
",",
"src_path",
",",
"src_path",
")"
] | Add all `src_paths` as artifact of type `artifact_type`. | [
"Add",
"all",
"src_paths",
"as",
"artifact",
"of",
"type",
"artifact_type",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/artifact.py#L107-L110 | train | 44,169 |
resonai/ybt | yabt/artifact.py | ArtifactStore.link_types | def link_types(self, base_dir: str, types: list, conf: Config) -> int:
"""Link all artifacts with types `types` under `base_dir` and return
the number of linked artifacts."""
num_linked = 0
for kind in types:
artifact_map = self._artifacts.get(kind)
if not artifact_map:
continue
num_linked += self._link(join(base_dir, self.type_to_dir[kind]),
artifact_map, conf)
return num_linked | python | def link_types(self, base_dir: str, types: list, conf: Config) -> int:
"""Link all artifacts with types `types` under `base_dir` and return
the number of linked artifacts."""
num_linked = 0
for kind in types:
artifact_map = self._artifacts.get(kind)
if not artifact_map:
continue
num_linked += self._link(join(base_dir, self.type_to_dir[kind]),
artifact_map, conf)
return num_linked | [
"def",
"link_types",
"(",
"self",
",",
"base_dir",
":",
"str",
",",
"types",
":",
"list",
",",
"conf",
":",
"Config",
")",
"->",
"int",
":",
"num_linked",
"=",
"0",
"for",
"kind",
"in",
"types",
":",
"artifact_map",
"=",
"self",
".",
"_artifacts",
".... | Link all artifacts with types `types` under `base_dir` and return
the number of linked artifacts. | [
"Link",
"all",
"artifacts",
"with",
"types",
"types",
"under",
"base_dir",
"and",
"return",
"the",
"number",
"of",
"linked",
"artifacts",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/artifact.py#L123-L133 | train | 44,170 |
resonai/ybt | yabt/artifact.py | ArtifactStore.link_for_image | def link_for_image(self, base_dir: str, conf: Config) -> int:
"""Link all artifacts required for a Docker image under `base_dir` and
return the number of linked artifacts."""
return self.link_types(
base_dir,
[ArtifactType.app, ArtifactType.binary, ArtifactType.gen_py],
conf) | python | def link_for_image(self, base_dir: str, conf: Config) -> int:
"""Link all artifacts required for a Docker image under `base_dir` and
return the number of linked artifacts."""
return self.link_types(
base_dir,
[ArtifactType.app, ArtifactType.binary, ArtifactType.gen_py],
conf) | [
"def",
"link_for_image",
"(",
"self",
",",
"base_dir",
":",
"str",
",",
"conf",
":",
"Config",
")",
"->",
"int",
":",
"return",
"self",
".",
"link_types",
"(",
"base_dir",
",",
"[",
"ArtifactType",
".",
"app",
",",
"ArtifactType",
".",
"binary",
",",
"... | Link all artifacts required for a Docker image under `base_dir` and
return the number of linked artifacts. | [
"Link",
"all",
"artifacts",
"required",
"for",
"a",
"Docker",
"image",
"under",
"base_dir",
"and",
"return",
"the",
"number",
"of",
"linked",
"artifacts",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/artifact.py#L135-L141 | train | 44,171 |
resonai/ybt | yabt/artifact.py | ArtifactStore._link | def _link(self, base_dir: str, artifact_map: dict, conf: Config):
"""Link all artifacts in `artifact_map` under `base_dir` and return
the number of artifacts linked."""
num_linked = 0
for dst, src in artifact_map.items():
abs_src = join(conf.project_root, src)
abs_dest = join(conf.project_root, base_dir, dst)
link_node(abs_src, abs_dest)
num_linked += 1
return num_linked | python | def _link(self, base_dir: str, artifact_map: dict, conf: Config):
"""Link all artifacts in `artifact_map` under `base_dir` and return
the number of artifacts linked."""
num_linked = 0
for dst, src in artifact_map.items():
abs_src = join(conf.project_root, src)
abs_dest = join(conf.project_root, base_dir, dst)
link_node(abs_src, abs_dest)
num_linked += 1
return num_linked | [
"def",
"_link",
"(",
"self",
",",
"base_dir",
":",
"str",
",",
"artifact_map",
":",
"dict",
",",
"conf",
":",
"Config",
")",
":",
"num_linked",
"=",
"0",
"for",
"dst",
",",
"src",
"in",
"artifact_map",
".",
"items",
"(",
")",
":",
"abs_src",
"=",
"... | Link all artifacts in `artifact_map` under `base_dir` and return
the number of artifacts linked. | [
"Link",
"all",
"artifacts",
"in",
"artifact_map",
"under",
"base_dir",
"and",
"return",
"the",
"number",
"of",
"artifacts",
"linked",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/artifact.py#L143-L152 | train | 44,172 |
resonai/ybt | setup.py | get_readme | def get_readme():
"""Read and return the content of the project README file."""
base_dir = path.abspath(path.dirname(__file__))
with open(path.join(base_dir, 'README.md'), encoding='utf-8') as readme_f:
return readme_f.read() | python | def get_readme():
"""Read and return the content of the project README file."""
base_dir = path.abspath(path.dirname(__file__))
with open(path.join(base_dir, 'README.md'), encoding='utf-8') as readme_f:
return readme_f.read() | [
"def",
"get_readme",
"(",
")",
":",
"base_dir",
"=",
"path",
".",
"abspath",
"(",
"path",
".",
"dirname",
"(",
"__file__",
")",
")",
"with",
"open",
"(",
"path",
".",
"join",
"(",
"base_dir",
",",
"'README.md'",
")",
",",
"encoding",
"=",
"'utf-8'",
... | Read and return the content of the project README file. | [
"Read",
"and",
"return",
"the",
"content",
"of",
"the",
"project",
"README",
"file",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/setup.py#L16-L20 | train | 44,173 |
resonai/ybt | yabt/target_extraction.py | args_to_props | def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict):
"""Convert build file `args` and `kwargs` to `target` props.
Use builder signature to validate builder usage in build-file, raising
appropriate exceptions on signature-mismatches.
Use builder signature default values to assign props values to args that
were not passed in the build-file call.
This function handles only the arg/kwargs-to-prop assignment, including
default values when necessary. When it returns, if no exception was raised,
it is guaranteed that `target.props` contains all args defined in the
builder registered signature, with values taken either from the build-file
call, or from default values provided in the signature.
Specifically, this function DOES NOT do anything about the arg types
defined in the builder signature.
:raise TypeError: On signature-call mismatch.
"""
if len(args) > len(builder.sig):
# too many positional arguments supplied - say how many we can take
raise TypeError('{}() takes {}, but {} were given'
.format(target.builder_name,
format_num_positional_arguments(builder),
len(args)))
# read given args into the matching props according to the signature
for arg_name, value in zip(builder.sig.keys(), args):
target.props[arg_name] = value
# read given kwargs into the named props, asserting matching sig arg names
for arg_name, value in kwargs.items():
if arg_name not in builder.sig:
raise TypeError("{}() got an unexpected keyword argument '{}'"
.format(target.builder_name, arg_name))
if arg_name in target.props:
raise TypeError("{}() got multiple values for argument '{}'"
.format(target.builder_name, arg_name))
target.props[arg_name] = value
# go over signature args, assigning default values to anything that wasn't
# assigned from args / kwargs, making sure no positional args are missing
missing_args = []
for arg_name, sig_spec in builder.sig.items():
if arg_name not in target.props:
if sig_spec.default == Empty:
missing_args.append(arg_name)
else:
target.props[arg_name] = sig_spec.default
if missing_args:
# not enough positional arguments supplied - say which
# TODO(itamar): match Python's error more closely (last "and "):
# foo() missing 3 required positional arguments: 'a', 'b', and 'c'
# TODO(itamar): use inflect
raise TypeError('{}() missing {} required positional argument{}: {}'
.format(target.builder_name, len(missing_args),
's' if len(missing_args) > 1 else '',
', '.join("'{}'".format(arg)
for arg in missing_args)))
logger.debug('Got props for target: {}', target) | python | def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict):
"""Convert build file `args` and `kwargs` to `target` props.
Use builder signature to validate builder usage in build-file, raising
appropriate exceptions on signature-mismatches.
Use builder signature default values to assign props values to args that
were not passed in the build-file call.
This function handles only the arg/kwargs-to-prop assignment, including
default values when necessary. When it returns, if no exception was raised,
it is guaranteed that `target.props` contains all args defined in the
builder registered signature, with values taken either from the build-file
call, or from default values provided in the signature.
Specifically, this function DOES NOT do anything about the arg types
defined in the builder signature.
:raise TypeError: On signature-call mismatch.
"""
if len(args) > len(builder.sig):
# too many positional arguments supplied - say how many we can take
raise TypeError('{}() takes {}, but {} were given'
.format(target.builder_name,
format_num_positional_arguments(builder),
len(args)))
# read given args into the matching props according to the signature
for arg_name, value in zip(builder.sig.keys(), args):
target.props[arg_name] = value
# read given kwargs into the named props, asserting matching sig arg names
for arg_name, value in kwargs.items():
if arg_name not in builder.sig:
raise TypeError("{}() got an unexpected keyword argument '{}'"
.format(target.builder_name, arg_name))
if arg_name in target.props:
raise TypeError("{}() got multiple values for argument '{}'"
.format(target.builder_name, arg_name))
target.props[arg_name] = value
# go over signature args, assigning default values to anything that wasn't
# assigned from args / kwargs, making sure no positional args are missing
missing_args = []
for arg_name, sig_spec in builder.sig.items():
if arg_name not in target.props:
if sig_spec.default == Empty:
missing_args.append(arg_name)
else:
target.props[arg_name] = sig_spec.default
if missing_args:
# not enough positional arguments supplied - say which
# TODO(itamar): match Python's error more closely (last "and "):
# foo() missing 3 required positional arguments: 'a', 'b', and 'c'
# TODO(itamar): use inflect
raise TypeError('{}() missing {} required positional argument{}: {}'
.format(target.builder_name, len(missing_args),
's' if len(missing_args) > 1 else '',
', '.join("'{}'".format(arg)
for arg in missing_args)))
logger.debug('Got props for target: {}', target) | [
"def",
"args_to_props",
"(",
"target",
":",
"Target",
",",
"builder",
":",
"Builder",
",",
"args",
":",
"list",
",",
"kwargs",
":",
"dict",
")",
":",
"if",
"len",
"(",
"args",
")",
">",
"len",
"(",
"builder",
".",
"sig",
")",
":",
"# too many positio... | Convert build file `args` and `kwargs` to `target` props.
Use builder signature to validate builder usage in build-file, raising
appropriate exceptions on signature-mismatches.
Use builder signature default values to assign props values to args that
were not passed in the build-file call.
This function handles only the arg/kwargs-to-prop assignment, including
default values when necessary. When it returns, if no exception was raised,
it is guaranteed that `target.props` contains all args defined in the
builder registered signature, with values taken either from the build-file
call, or from default values provided in the signature.
Specifically, this function DOES NOT do anything about the arg types
defined in the builder signature.
:raise TypeError: On signature-call mismatch. | [
"Convert",
"build",
"file",
"args",
"and",
"kwargs",
"to",
"target",
"props",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_extraction.py#L50-L107 | train | 44,174 |
resonai/ybt | yabt/target_extraction.py | extractor | def extractor(
builder_name: str, builder: Builder, build_file_path: str,
build_context) -> types.FunctionType:
"""Return a target extraction function for a specific builder and a
specific build file."""
build_module = to_build_module(build_file_path, build_context.conf)
def extract_target(*args, **kwargs):
"""The actual target extraction function that is executed when any
builder function is called in a build file."""
target = Target(builder_name=builder_name)
# convert args/kwargs to target.props and handle arg types
args_to_props(target, builder, args, kwargs)
raw_name = target.props.name
handle_typed_args(target, builder, build_module)
logger.debug('Extracting target: {}', target)
# promote the `name` and `deps` from props to the target instance
target.name = target.props.pop('name')
target.deps = target.props.pop('deps', [])
if target.deps:
logger.debug('Got deps for target "{0.name}": {0.deps}', target)
# invoke builder hooks on extracted target
for hook_name, hook in Plugin.get_hooks_for_builder(builder_name):
logger.debug('About to invoke hook {} on target {}',
hook_name, target)
hook(build_context, target)
# save the target in the build context
build_context.register_target(target)
logger.debug('Registered {}', target)
return extract_target | python | def extractor(
builder_name: str, builder: Builder, build_file_path: str,
build_context) -> types.FunctionType:
"""Return a target extraction function for a specific builder and a
specific build file."""
build_module = to_build_module(build_file_path, build_context.conf)
def extract_target(*args, **kwargs):
"""The actual target extraction function that is executed when any
builder function is called in a build file."""
target = Target(builder_name=builder_name)
# convert args/kwargs to target.props and handle arg types
args_to_props(target, builder, args, kwargs)
raw_name = target.props.name
handle_typed_args(target, builder, build_module)
logger.debug('Extracting target: {}', target)
# promote the `name` and `deps` from props to the target instance
target.name = target.props.pop('name')
target.deps = target.props.pop('deps', [])
if target.deps:
logger.debug('Got deps for target "{0.name}": {0.deps}', target)
# invoke builder hooks on extracted target
for hook_name, hook in Plugin.get_hooks_for_builder(builder_name):
logger.debug('About to invoke hook {} on target {}',
hook_name, target)
hook(build_context, target)
# save the target in the build context
build_context.register_target(target)
logger.debug('Registered {}', target)
return extract_target | [
"def",
"extractor",
"(",
"builder_name",
":",
"str",
",",
"builder",
":",
"Builder",
",",
"build_file_path",
":",
"str",
",",
"build_context",
")",
"->",
"types",
".",
"FunctionType",
":",
"build_module",
"=",
"to_build_module",
"(",
"build_file_path",
",",
"b... | Return a target extraction function for a specific builder and a
specific build file. | [
"Return",
"a",
"target",
"extraction",
"function",
"for",
"a",
"specific",
"builder",
"and",
"a",
"specific",
"build",
"file",
"."
] | 5b40df0922ef3383eb85f2b04a26a2db4b81b3fd | https://github.com/resonai/ybt/blob/5b40df0922ef3383eb85f2b04a26a2db4b81b3fd/yabt/target_extraction.py#L163-L193 | train | 44,175 |
softwarefactory-project/rdopkg | rdopkg/cli.py | rdopkg_runner | def rdopkg_runner():
"""
default rdopkg action runner including rdopkg action modules
"""
aman = ActionManager()
# assume all actions.* modules are action modules
aman.add_actions_modules(actions)
aman.fill_aliases()
# additional rdopkg action module logic should go here
return ActionRunner(action_manager=aman) | python | def rdopkg_runner():
"""
default rdopkg action runner including rdopkg action modules
"""
aman = ActionManager()
# assume all actions.* modules are action modules
aman.add_actions_modules(actions)
aman.fill_aliases()
# additional rdopkg action module logic should go here
return ActionRunner(action_manager=aman) | [
"def",
"rdopkg_runner",
"(",
")",
":",
"aman",
"=",
"ActionManager",
"(",
")",
"# assume all actions.* modules are action modules",
"aman",
".",
"add_actions_modules",
"(",
"actions",
")",
"aman",
".",
"fill_aliases",
"(",
")",
"# additional rdopkg action module logic sho... | default rdopkg action runner including rdopkg action modules | [
"default",
"rdopkg",
"action",
"runner",
"including",
"rdopkg",
"action",
"modules"
] | 2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c | https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/cli.py#L10-L19 | train | 44,176 |
softwarefactory-project/rdopkg | rdopkg/cli.py | rdopkg | def rdopkg(*cargs):
"""
rdopkg CLI interface
Execute rdopkg action with specified arguments and return
shell friendly exit code.
This is the default high level way to interact with rdopkg.
py> rdopkg('new-version', '1.2.3')
is equivalent to
$> rdopkg new-version 1.2.3
"""
runner = rdopkg_runner()
return shell.run(runner,
cargs=cargs,
prog='rdopkg',
version=__version__) | python | def rdopkg(*cargs):
"""
rdopkg CLI interface
Execute rdopkg action with specified arguments and return
shell friendly exit code.
This is the default high level way to interact with rdopkg.
py> rdopkg('new-version', '1.2.3')
is equivalent to
$> rdopkg new-version 1.2.3
"""
runner = rdopkg_runner()
return shell.run(runner,
cargs=cargs,
prog='rdopkg',
version=__version__) | [
"def",
"rdopkg",
"(",
"*",
"cargs",
")",
":",
"runner",
"=",
"rdopkg_runner",
"(",
")",
"return",
"shell",
".",
"run",
"(",
"runner",
",",
"cargs",
"=",
"cargs",
",",
"prog",
"=",
"'rdopkg'",
",",
"version",
"=",
"__version__",
")"
] | rdopkg CLI interface
Execute rdopkg action with specified arguments and return
shell friendly exit code.
This is the default high level way to interact with rdopkg.
py> rdopkg('new-version', '1.2.3')
is equivalent to
$> rdopkg new-version 1.2.3 | [
"rdopkg",
"CLI",
"interface"
] | 2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c | https://github.com/softwarefactory-project/rdopkg/blob/2d2bed4e7cd329558a36d0dd404ec4ac8f9f254c/rdopkg/cli.py#L22-L41 | train | 44,177 |
infothrill/python-dyndnsc | dyndnsc/core.py | getDynDnsClientForConfig | def getDynDnsClientForConfig(config, plugins=None):
"""Instantiate and return a complete and working dyndns client.
:param config: a dictionary with configuration keys
:param plugins: an object that implements PluginManager
"""
initparams = {}
if "interval" in config:
initparams["detect_interval"] = config["interval"]
if plugins is not None:
initparams["plugins"] = plugins
if "updater" in config:
for updater_name, updater_options in config["updater"]:
initparams["updater"] = get_updater_class(updater_name)(**updater_options)
# find class and instantiate the detector:
if "detector" in config:
detector_name, detector_opts = config["detector"][-1]
try:
klass = get_detector_class(detector_name)
except KeyError as exc:
LOG.warning("Invalid change detector configuration: '%s'",
detector_name, exc_info=exc)
return None
thedetector = klass(**detector_opts)
initparams["detector"] = thedetector
return DynDnsClient(**initparams) | python | def getDynDnsClientForConfig(config, plugins=None):
"""Instantiate and return a complete and working dyndns client.
:param config: a dictionary with configuration keys
:param plugins: an object that implements PluginManager
"""
initparams = {}
if "interval" in config:
initparams["detect_interval"] = config["interval"]
if plugins is not None:
initparams["plugins"] = plugins
if "updater" in config:
for updater_name, updater_options in config["updater"]:
initparams["updater"] = get_updater_class(updater_name)(**updater_options)
# find class and instantiate the detector:
if "detector" in config:
detector_name, detector_opts = config["detector"][-1]
try:
klass = get_detector_class(detector_name)
except KeyError as exc:
LOG.warning("Invalid change detector configuration: '%s'",
detector_name, exc_info=exc)
return None
thedetector = klass(**detector_opts)
initparams["detector"] = thedetector
return DynDnsClient(**initparams) | [
"def",
"getDynDnsClientForConfig",
"(",
"config",
",",
"plugins",
"=",
"None",
")",
":",
"initparams",
"=",
"{",
"}",
"if",
"\"interval\"",
"in",
"config",
":",
"initparams",
"[",
"\"detect_interval\"",
"]",
"=",
"config",
"[",
"\"interval\"",
"]",
"if",
"pl... | Instantiate and return a complete and working dyndns client.
:param config: a dictionary with configuration keys
:param plugins: an object that implements PluginManager | [
"Instantiate",
"and",
"return",
"a",
"complete",
"and",
"working",
"dyndns",
"client",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/core.py#L163-L192 | train | 44,178 |
infothrill/python-dyndnsc | dyndnsc/core.py | DynDnsClient.has_state_changed | def has_state_changed(self):
"""
Detect changes in offline detector and real DNS value.
Detect a change either in the offline detector or a
difference between the real DNS value and what the online
detector last got.
This is efficient, since it only generates minimal dns traffic
for online detectors and no traffic at all for offline detectors.
:rtype: boolean
"""
self.lastcheck = time.time()
# prefer offline state change detection:
if self.detector.can_detect_offline():
self.detector.detect()
elif not self.dns.detect() == self.detector.get_current_value():
# The following produces traffic, but probably less traffic
# overall than the detector
self.detector.detect()
if self.detector.has_changed():
LOG.debug("detector changed")
return True
elif self.dns.has_changed():
LOG.debug("dns changed")
return True
return False | python | def has_state_changed(self):
"""
Detect changes in offline detector and real DNS value.
Detect a change either in the offline detector or a
difference between the real DNS value and what the online
detector last got.
This is efficient, since it only generates minimal dns traffic
for online detectors and no traffic at all for offline detectors.
:rtype: boolean
"""
self.lastcheck = time.time()
# prefer offline state change detection:
if self.detector.can_detect_offline():
self.detector.detect()
elif not self.dns.detect() == self.detector.get_current_value():
# The following produces traffic, but probably less traffic
# overall than the detector
self.detector.detect()
if self.detector.has_changed():
LOG.debug("detector changed")
return True
elif self.dns.has_changed():
LOG.debug("dns changed")
return True
return False | [
"def",
"has_state_changed",
"(",
"self",
")",
":",
"self",
".",
"lastcheck",
"=",
"time",
".",
"time",
"(",
")",
"# prefer offline state change detection:",
"if",
"self",
".",
"detector",
".",
"can_detect_offline",
"(",
")",
":",
"self",
".",
"detector",
".",
... | Detect changes in offline detector and real DNS value.
Detect a change either in the offline detector or a
difference between the real DNS value and what the online
detector last got.
This is efficient, since it only generates minimal dns traffic
for online detectors and no traffic at all for offline detectors.
:rtype: boolean | [
"Detect",
"changes",
"in",
"offline",
"detector",
"and",
"real",
"DNS",
"value",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/core.py#L85-L113 | train | 44,179 |
infothrill/python-dyndnsc | dyndnsc/detector/webcheck.py | IPDetectorWebCheckBase.detect | def detect(self):
"""
Try to contact a remote webservice and parse the returned output.
Determine the IP address from the parsed output and return.
"""
if self.opts_url and self.opts_parser:
url = self.opts_url
parser = self.opts_parser
else:
url, parser = choice(self.urls) # noqa: S311
parser = globals().get("_parser_" + parser)
theip = _get_ip_from_url(url, parser)
if theip is None:
LOG.info("Could not detect IP using webcheck! Offline?")
self.set_current_value(theip)
return theip | python | def detect(self):
"""
Try to contact a remote webservice and parse the returned output.
Determine the IP address from the parsed output and return.
"""
if self.opts_url and self.opts_parser:
url = self.opts_url
parser = self.opts_parser
else:
url, parser = choice(self.urls) # noqa: S311
parser = globals().get("_parser_" + parser)
theip = _get_ip_from_url(url, parser)
if theip is None:
LOG.info("Could not detect IP using webcheck! Offline?")
self.set_current_value(theip)
return theip | [
"def",
"detect",
"(",
"self",
")",
":",
"if",
"self",
".",
"opts_url",
"and",
"self",
".",
"opts_parser",
":",
"url",
"=",
"self",
".",
"opts_url",
"parser",
"=",
"self",
".",
"opts_parser",
"else",
":",
"url",
",",
"parser",
"=",
"choice",
"(",
"sel... | Try to contact a remote webservice and parse the returned output.
Determine the IP address from the parsed output and return. | [
"Try",
"to",
"contact",
"a",
"remote",
"webservice",
"and",
"parse",
"the",
"returned",
"output",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/webcheck.py#L95-L111 | train | 44,180 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | PluginProxy.add_plugin | def add_plugin(self, plugin, call):
"""Add plugin to list of plugins.
Will be added if it has the attribute I'm bound to.
"""
meth = getattr(plugin, call, None)
if meth is not None:
self.plugins.append((plugin, meth)) | python | def add_plugin(self, plugin, call):
"""Add plugin to list of plugins.
Will be added if it has the attribute I'm bound to.
"""
meth = getattr(plugin, call, None)
if meth is not None:
self.plugins.append((plugin, meth)) | [
"def",
"add_plugin",
"(",
"self",
",",
"plugin",
",",
"call",
")",
":",
"meth",
"=",
"getattr",
"(",
"plugin",
",",
"call",
",",
"None",
")",
"if",
"meth",
"is",
"not",
"None",
":",
"self",
".",
"plugins",
".",
"append",
"(",
"(",
"plugin",
",",
... | Add plugin to list of plugins.
Will be added if it has the attribute I'm bound to. | [
"Add",
"plugin",
"to",
"list",
"of",
"plugins",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L38-L45 | train | 44,181 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | PluginProxy.listcall | def listcall(self, *arg, **kw):
"""Call each plugin sequentially.
Return the first result that is not None.
"""
final_result = None
for _, meth in self.plugins:
result = meth(*arg, **kw)
if final_result is None and result is not None:
final_result = result
return final_result | python | def listcall(self, *arg, **kw):
"""Call each plugin sequentially.
Return the first result that is not None.
"""
final_result = None
for _, meth in self.plugins:
result = meth(*arg, **kw)
if final_result is None and result is not None:
final_result = result
return final_result | [
"def",
"listcall",
"(",
"self",
",",
"*",
"arg",
",",
"*",
"*",
"kw",
")",
":",
"final_result",
"=",
"None",
"for",
"_",
",",
"meth",
"in",
"self",
".",
"plugins",
":",
"result",
"=",
"meth",
"(",
"*",
"arg",
",",
"*",
"*",
"kw",
")",
"if",
"... | Call each plugin sequentially.
Return the first result that is not None. | [
"Call",
"each",
"plugin",
"sequentially",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L47-L57 | train | 44,182 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | PluginManager.add_plugin | def add_plugin(self, plugin):
"""Add the given plugin."""
# allow plugins loaded via entry points to override builtin plugins
new_name = self.plugin_name(plugin)
self._plugins[:] = [p for p in self._plugins
if self.plugin_name(p) != new_name]
self._plugins.append(plugin) | python | def add_plugin(self, plugin):
"""Add the given plugin."""
# allow plugins loaded via entry points to override builtin plugins
new_name = self.plugin_name(plugin)
self._plugins[:] = [p for p in self._plugins
if self.plugin_name(p) != new_name]
self._plugins.append(plugin) | [
"def",
"add_plugin",
"(",
"self",
",",
"plugin",
")",
":",
"# allow plugins loaded via entry points to override builtin plugins",
"new_name",
"=",
"self",
".",
"plugin_name",
"(",
"plugin",
")",
"self",
".",
"_plugins",
"[",
":",
"]",
"=",
"[",
"p",
"for",
"p",
... | Add the given plugin. | [
"Add",
"the",
"given",
"plugin",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L129-L135 | train | 44,183 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | PluginManager.configure | def configure(self, args):
"""Configure the set of plugins with the given args.
After configuration, disabled plugins are removed from the plugins list.
"""
for plug in self._plugins:
plug_name = self.plugin_name(plug)
plug.enabled = getattr(args, "plugin_%s" % plug_name, False)
if plug.enabled and getattr(plug, "configure", None):
if callable(getattr(plug, "configure", None)):
plug.configure(args)
LOG.debug("Available plugins: %s", self._plugins)
self.plugins = [plugin for plugin in self._plugins if getattr(plugin, "enabled", False)]
LOG.debug("Enabled plugins: %s", self.plugins) | python | def configure(self, args):
"""Configure the set of plugins with the given args.
After configuration, disabled plugins are removed from the plugins list.
"""
for plug in self._plugins:
plug_name = self.plugin_name(plug)
plug.enabled = getattr(args, "plugin_%s" % plug_name, False)
if plug.enabled and getattr(plug, "configure", None):
if callable(getattr(plug, "configure", None)):
plug.configure(args)
LOG.debug("Available plugins: %s", self._plugins)
self.plugins = [plugin for plugin in self._plugins if getattr(plugin, "enabled", False)]
LOG.debug("Enabled plugins: %s", self.plugins) | [
"def",
"configure",
"(",
"self",
",",
"args",
")",
":",
"for",
"plug",
"in",
"self",
".",
"_plugins",
":",
"plug_name",
"=",
"self",
".",
"plugin_name",
"(",
"plug",
")",
"plug",
".",
"enabled",
"=",
"getattr",
"(",
"args",
",",
"\"plugin_%s\"",
"%",
... | Configure the set of plugins with the given args.
After configuration, disabled plugins are removed from the plugins list. | [
"Configure",
"the",
"set",
"of",
"plugins",
"with",
"the",
"given",
"args",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L147-L160 | train | 44,184 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | PluginManager.options | def options(self, parser, env):
"""Register commandline options with the given parser.
Implement this method for normal options behavior with protection from
OptionConflictErrors. If you override this method and want the default
--with-$name option to be registered, be sure to call super().
:param parser: argparse parser object
:param env:
"""
def get_help(plug):
"""Extract the help docstring from the given plugin."""
import textwrap
if plug.__class__.__doc__:
# doc sections are often indented; compress the spaces
return textwrap.dedent(plug.__class__.__doc__)
return "(no help available)"
for plug in self._plugins:
env_opt = ENV_PREFIX + self.plugin_name(plug).upper()
env_opt = env_opt.replace("-", "_")
parser.add_argument("--with-%s" % self.plugin_name(plug),
action="store_true",
dest="plugin_%s" % self.plugin_name(plug),
default=env.get(env_opt),
help="Enable plugin %s: %s [%s]" %
(plug.__class__.__name__, get_help(plug), env_opt)) | python | def options(self, parser, env):
"""Register commandline options with the given parser.
Implement this method for normal options behavior with protection from
OptionConflictErrors. If you override this method and want the default
--with-$name option to be registered, be sure to call super().
:param parser: argparse parser object
:param env:
"""
def get_help(plug):
"""Extract the help docstring from the given plugin."""
import textwrap
if plug.__class__.__doc__:
# doc sections are often indented; compress the spaces
return textwrap.dedent(plug.__class__.__doc__)
return "(no help available)"
for plug in self._plugins:
env_opt = ENV_PREFIX + self.plugin_name(plug).upper()
env_opt = env_opt.replace("-", "_")
parser.add_argument("--with-%s" % self.plugin_name(plug),
action="store_true",
dest="plugin_%s" % self.plugin_name(plug),
default=env.get(env_opt),
help="Enable plugin %s: %s [%s]" %
(plug.__class__.__name__, get_help(plug), env_opt)) | [
"def",
"options",
"(",
"self",
",",
"parser",
",",
"env",
")",
":",
"def",
"get_help",
"(",
"plug",
")",
":",
"\"\"\"Extract the help docstring from the given plugin.\"\"\"",
"import",
"textwrap",
"if",
"plug",
".",
"__class__",
".",
"__doc__",
":",
"# doc section... | Register commandline options with the given parser.
Implement this method for normal options behavior with protection from
OptionConflictErrors. If you override this method and want the default
--with-$name option to be registered, be sure to call super().
:param parser: argparse parser object
:param env: | [
"Register",
"commandline",
"options",
"with",
"the",
"given",
"parser",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L162-L187 | train | 44,185 |
infothrill/python-dyndnsc | dyndnsc/plugins/manager.py | BuiltinPluginManager.load_plugins | def load_plugins(self):
"""Load plugins from `dyndnsc.plugins.builtin`."""
from dyndnsc.plugins.builtin import PLUGINS
for plugin in PLUGINS:
self.add_plugin(plugin())
super(BuiltinPluginManager, self).load_plugins() | python | def load_plugins(self):
"""Load plugins from `dyndnsc.plugins.builtin`."""
from dyndnsc.plugins.builtin import PLUGINS
for plugin in PLUGINS:
self.add_plugin(plugin())
super(BuiltinPluginManager, self).load_plugins() | [
"def",
"load_plugins",
"(",
"self",
")",
":",
"from",
"dyndnsc",
".",
"plugins",
".",
"builtin",
"import",
"PLUGINS",
"for",
"plugin",
"in",
"PLUGINS",
":",
"self",
".",
"add_plugin",
"(",
"plugin",
"(",
")",
")",
"super",
"(",
"BuiltinPluginManager",
",",... | Load plugins from `dyndnsc.plugins.builtin`. | [
"Load",
"plugins",
"from",
"dyndnsc",
".",
"plugins",
".",
"builtin",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/plugins/manager.py#L239-L244 | train | 44,186 |
GreenBuildingRegistry/yaml-config | yamlconf/config.py | Config.keys | def keys(self, section=None):
"""Provide dict like keys method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.keys() | python | def keys(self, section=None):
"""Provide dict like keys method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.keys() | [
"def",
"keys",
"(",
"self",
",",
"section",
"=",
"None",
")",
":",
"if",
"not",
"section",
"and",
"self",
".",
"section",
":",
"section",
"=",
"self",
".",
"section",
"config",
"=",
"self",
".",
"config",
".",
"get",
"(",
"section",
",",
"{",
"}",
... | Provide dict like keys method | [
"Provide",
"dict",
"like",
"keys",
"method"
] | 3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10 | https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/config.py#L142-L147 | train | 44,187 |
GreenBuildingRegistry/yaml-config | yamlconf/config.py | Config.items | def items(self, section=None):
"""Provide dict like items method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.items() | python | def items(self, section=None):
"""Provide dict like items method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.items() | [
"def",
"items",
"(",
"self",
",",
"section",
"=",
"None",
")",
":",
"if",
"not",
"section",
"and",
"self",
".",
"section",
":",
"section",
"=",
"self",
".",
"section",
"config",
"=",
"self",
".",
"config",
".",
"get",
"(",
"section",
",",
"{",
"}",... | Provide dict like items method | [
"Provide",
"dict",
"like",
"items",
"method"
] | 3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10 | https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/config.py#L149-L154 | train | 44,188 |
GreenBuildingRegistry/yaml-config | yamlconf/config.py | Config.values | def values(self, section=None):
"""Provide dict like values method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.values() | python | def values(self, section=None):
"""Provide dict like values method"""
if not section and self.section:
section = self.section
config = self.config.get(section, {}) if section else self.config
return config.values() | [
"def",
"values",
"(",
"self",
",",
"section",
"=",
"None",
")",
":",
"if",
"not",
"section",
"and",
"self",
".",
"section",
":",
"section",
"=",
"self",
".",
"section",
"config",
"=",
"self",
".",
"config",
".",
"get",
"(",
"section",
",",
"{",
"}"... | Provide dict like values method | [
"Provide",
"dict",
"like",
"values",
"method"
] | 3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10 | https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/config.py#L156-L161 | train | 44,189 |
GreenBuildingRegistry/yaml-config | yamlconf/config.py | Config._get_filepath | def _get_filepath(self, filename=None, config_dir=None):
"""
Get config file.
:param filename: name of config file (not path)
:param config_dir: dir name prepended to file name.
Note: we use e.g. GBR_CONFIG_DIR here, this is the default
value in GBR but it is actually self.env_prefix + '_DIR' etc.
If config_dir is not supplied it will be set to the value of the
environment variable GBR_CONFIG_DIR or None.
If filename is not supplied and the environment variable GBR_CONFIG
is set and contains a path, its value will be tested to see if a file
exists, if so that is returned as the config file otherwise filename
will be set to GBR_CONFIG, if it exists, otherwise 'config.yaml'.
If a filename is supplied or GBR_CONFIG is not an existing file:
If the environment variable GBR_CONFIG_PATH exists the path
GBR_CONFIG_PATH/config_dir/filename is checked.
If it doesn't exist config/CONFIG_DIR/filename is checked
(relative to the root of the (GBR) repo)
finally GBR_CONFIG_DEFAULT/CONFIG_DIR/filename is tried
If no file is found None will be returned.
"""
# pylint: disable=no-self-use
config_file = None
config_dir_env_var = self.env_prefix + '_DIR'
if not filename:
# Check env vars for config
filename = os.getenv(self.env_prefix, default=self.default_file)
# contains path so try directly
if os.path.dirname(filename) and os.path.exists(filename):
config_file = filename
if not config_file:
# Cannot contain path
filename = os.path.basename(filename)
if not config_dir:
config_dir = os.getenv(config_dir_env_var, default='')
for path in [self.basepath, self.config_root]:
filepath = os.path.join(path, config_dir, filename)
if os.path.exists(filepath):
config_file = filepath
break
return config_file | python | def _get_filepath(self, filename=None, config_dir=None):
"""
Get config file.
:param filename: name of config file (not path)
:param config_dir: dir name prepended to file name.
Note: we use e.g. GBR_CONFIG_DIR here, this is the default
value in GBR but it is actually self.env_prefix + '_DIR' etc.
If config_dir is not supplied it will be set to the value of the
environment variable GBR_CONFIG_DIR or None.
If filename is not supplied and the environment variable GBR_CONFIG
is set and contains a path, its value will be tested to see if a file
exists, if so that is returned as the config file otherwise filename
will be set to GBR_CONFIG, if it exists, otherwise 'config.yaml'.
If a filename is supplied or GBR_CONFIG is not an existing file:
If the environment variable GBR_CONFIG_PATH exists the path
GBR_CONFIG_PATH/config_dir/filename is checked.
If it doesn't exist config/CONFIG_DIR/filename is checked
(relative to the root of the (GBR) repo)
finally GBR_CONFIG_DEFAULT/CONFIG_DIR/filename is tried
If no file is found None will be returned.
"""
# pylint: disable=no-self-use
config_file = None
config_dir_env_var = self.env_prefix + '_DIR'
if not filename:
# Check env vars for config
filename = os.getenv(self.env_prefix, default=self.default_file)
# contains path so try directly
if os.path.dirname(filename) and os.path.exists(filename):
config_file = filename
if not config_file:
# Cannot contain path
filename = os.path.basename(filename)
if not config_dir:
config_dir = os.getenv(config_dir_env_var, default='')
for path in [self.basepath, self.config_root]:
filepath = os.path.join(path, config_dir, filename)
if os.path.exists(filepath):
config_file = filepath
break
return config_file | [
"def",
"_get_filepath",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"config_dir",
"=",
"None",
")",
":",
"# pylint: disable=no-self-use",
"config_file",
"=",
"None",
"config_dir_env_var",
"=",
"self",
".",
"env_prefix",
"+",
"'_DIR'",
"if",
"not",
"filename"... | Get config file.
:param filename: name of config file (not path)
:param config_dir: dir name prepended to file name.
Note: we use e.g. GBR_CONFIG_DIR here, this is the default
value in GBR but it is actually self.env_prefix + '_DIR' etc.
If config_dir is not supplied it will be set to the value of the
environment variable GBR_CONFIG_DIR or None.
If filename is not supplied and the environment variable GBR_CONFIG
is set and contains a path, its value will be tested to see if a file
exists, if so that is returned as the config file otherwise filename
will be set to GBR_CONFIG, if it exists, otherwise 'config.yaml'.
If a filename is supplied or GBR_CONFIG is not an existing file:
If the environment variable GBR_CONFIG_PATH exists the path
GBR_CONFIG_PATH/config_dir/filename is checked.
If it doesn't exist config/CONFIG_DIR/filename is checked
(relative to the root of the (GBR) repo)
finally GBR_CONFIG_DEFAULT/CONFIG_DIR/filename is tried
If no file is found None will be returned. | [
"Get",
"config",
"file",
"."
] | 3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10 | https://github.com/GreenBuildingRegistry/yaml-config/blob/3d4bf4cadd07d4c3b71674077bd7cf16efb6ea10/yamlconf/config.py#L177-L225 | train | 44,190 |
infothrill/python-dyndnsc | dyndnsc/detector/dns.py | resolve | def resolve(hostname, family=AF_UNSPEC):
"""
Resolve hostname to one or more IP addresses through the operating system.
Resolution is carried out for the given address family. If no
address family is specified, only IPv4 and IPv6 addresses are returned. If
multiple IP addresses are found, all are returned.
:param family: AF_INET or AF_INET6 or AF_UNSPEC (default)
:return: tuple of unique IP addresses
"""
af_ok = (AF_INET, AF_INET6)
if family != AF_UNSPEC and family not in af_ok:
raise ValueError("Invalid family '%s'" % family)
ips = ()
try:
addrinfo = socket.getaddrinfo(hostname, None, family)
except socket.gaierror as exc:
# EAI_NODATA and EAI_NONAME are expected if this name is not (yet)
# present in DNS
if exc.errno not in (socket.EAI_NODATA, socket.EAI_NONAME):
LOG.debug("socket.getaddrinfo() raised an exception", exc_info=exc)
else:
if family == AF_UNSPEC:
ips = tuple({item[4][0] for item in addrinfo if item[0] in af_ok})
else:
ips = tuple({item[4][0] for item in addrinfo})
return ips | python | def resolve(hostname, family=AF_UNSPEC):
"""
Resolve hostname to one or more IP addresses through the operating system.
Resolution is carried out for the given address family. If no
address family is specified, only IPv4 and IPv6 addresses are returned. If
multiple IP addresses are found, all are returned.
:param family: AF_INET or AF_INET6 or AF_UNSPEC (default)
:return: tuple of unique IP addresses
"""
af_ok = (AF_INET, AF_INET6)
if family != AF_UNSPEC and family not in af_ok:
raise ValueError("Invalid family '%s'" % family)
ips = ()
try:
addrinfo = socket.getaddrinfo(hostname, None, family)
except socket.gaierror as exc:
# EAI_NODATA and EAI_NONAME are expected if this name is not (yet)
# present in DNS
if exc.errno not in (socket.EAI_NODATA, socket.EAI_NONAME):
LOG.debug("socket.getaddrinfo() raised an exception", exc_info=exc)
else:
if family == AF_UNSPEC:
ips = tuple({item[4][0] for item in addrinfo if item[0] in af_ok})
else:
ips = tuple({item[4][0] for item in addrinfo})
return ips | [
"def",
"resolve",
"(",
"hostname",
",",
"family",
"=",
"AF_UNSPEC",
")",
":",
"af_ok",
"=",
"(",
"AF_INET",
",",
"AF_INET6",
")",
"if",
"family",
"!=",
"AF_UNSPEC",
"and",
"family",
"not",
"in",
"af_ok",
":",
"raise",
"ValueError",
"(",
"\"Invalid family '... | Resolve hostname to one or more IP addresses through the operating system.
Resolution is carried out for the given address family. If no
address family is specified, only IPv4 and IPv6 addresses are returned. If
multiple IP addresses are found, all are returned.
:param family: AF_INET or AF_INET6 or AF_UNSPEC (default)
:return: tuple of unique IP addresses | [
"Resolve",
"hostname",
"to",
"one",
"or",
"more",
"IP",
"addresses",
"through",
"the",
"operating",
"system",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/dns.py#L13-L40 | train | 44,191 |
infothrill/python-dyndnsc | dyndnsc/detector/dns.py | IPDetector_DNS.detect | def detect(self):
"""
Resolve the hostname to an IP address through the operating system.
Depending on the 'family' option, either ipv4 or ipv6 resolution is
carried out.
If multiple IP addresses are found, the first one is returned.
:return: ip address
"""
theip = next(iter(resolve(self.opts_hostname, self.opts_family)), None)
self.set_current_value(theip)
return theip | python | def detect(self):
"""
Resolve the hostname to an IP address through the operating system.
Depending on the 'family' option, either ipv4 or ipv6 resolution is
carried out.
If multiple IP addresses are found, the first one is returned.
:return: ip address
"""
theip = next(iter(resolve(self.opts_hostname, self.opts_family)), None)
self.set_current_value(theip)
return theip | [
"def",
"detect",
"(",
"self",
")",
":",
"theip",
"=",
"next",
"(",
"iter",
"(",
"resolve",
"(",
"self",
".",
"opts_hostname",
",",
"self",
".",
"opts_family",
")",
")",
",",
"None",
")",
"self",
".",
"set_current_value",
"(",
"theip",
")",
"return",
... | Resolve the hostname to an IP address through the operating system.
Depending on the 'family' option, either ipv4 or ipv6 resolution is
carried out.
If multiple IP addresses are found, the first one is returned.
:return: ip address | [
"Resolve",
"the",
"hostname",
"to",
"an",
"IP",
"address",
"through",
"the",
"operating",
"system",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/dns.py#L70-L83 | train | 44,192 |
infothrill/python-dyndnsc | dyndnsc/cli.py | list_presets | def list_presets(cfg, out=sys.stdout):
"""Write a human readable list of available presets to out.
:param cfg: ConfigParser instance
:param out: file object to write to
"""
for section in cfg.sections():
if section.startswith("preset:"):
out.write((section.replace("preset:", "")) + os.linesep)
for k, v in cfg.items(section):
out.write("\t%s = %s" % (k, v) + os.linesep) | python | def list_presets(cfg, out=sys.stdout):
"""Write a human readable list of available presets to out.
:param cfg: ConfigParser instance
:param out: file object to write to
"""
for section in cfg.sections():
if section.startswith("preset:"):
out.write((section.replace("preset:", "")) + os.linesep)
for k, v in cfg.items(section):
out.write("\t%s = %s" % (k, v) + os.linesep) | [
"def",
"list_presets",
"(",
"cfg",
",",
"out",
"=",
"sys",
".",
"stdout",
")",
":",
"for",
"section",
"in",
"cfg",
".",
"sections",
"(",
")",
":",
"if",
"section",
".",
"startswith",
"(",
"\"preset:\"",
")",
":",
"out",
".",
"write",
"(",
"(",
"sec... | Write a human readable list of available presets to out.
:param cfg: ConfigParser instance
:param out: file object to write to | [
"Write",
"a",
"human",
"readable",
"list",
"of",
"available",
"presets",
"to",
"out",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/cli.py#L23-L33 | train | 44,193 |
infothrill/python-dyndnsc | dyndnsc/cli.py | create_argparser | def create_argparser():
"""Instantiate an `argparse.ArgumentParser`.
Adds all basic cli options including default values.
"""
parser = argparse.ArgumentParser()
arg_defaults = {
"daemon": False,
"loop": False,
"listpresets": False,
"config": None,
"debug": False,
"sleeptime": 300,
"version": False,
"verbose_count": 0
}
# add generic client options to the CLI:
parser.add_argument("-c", "--config", dest="config",
help="config file", default=arg_defaults["config"])
parser.add_argument("--list-presets", dest="listpresets",
help="list all available presets",
action="store_true", default=arg_defaults["listpresets"])
parser.add_argument("-d", "--daemon", dest="daemon",
help="go into daemon mode (implies --loop)",
action="store_true", default=arg_defaults["daemon"])
parser.add_argument("--debug", dest="debug",
help="increase logging level to DEBUG (DEPRECATED, please use -vvv)",
action="store_true", default=arg_defaults["debug"])
parser.add_argument("--loop", dest="loop",
help="loop forever (default is to update once)",
action="store_true", default=arg_defaults["loop"])
parser.add_argument("--sleeptime", dest="sleeptime",
help="how long to sleep between checks in seconds",
default=arg_defaults["sleeptime"])
parser.add_argument("--version", dest="version",
help="show version and exit",
action="store_true", default=arg_defaults["version"])
parser.add_argument("-v", "--verbose", dest="verbose_count",
action="count", default=arg_defaults["verbose_count"],
help="increases log verbosity for each occurrence")
return parser, arg_defaults | python | def create_argparser():
"""Instantiate an `argparse.ArgumentParser`.
Adds all basic cli options including default values.
"""
parser = argparse.ArgumentParser()
arg_defaults = {
"daemon": False,
"loop": False,
"listpresets": False,
"config": None,
"debug": False,
"sleeptime": 300,
"version": False,
"verbose_count": 0
}
# add generic client options to the CLI:
parser.add_argument("-c", "--config", dest="config",
help="config file", default=arg_defaults["config"])
parser.add_argument("--list-presets", dest="listpresets",
help="list all available presets",
action="store_true", default=arg_defaults["listpresets"])
parser.add_argument("-d", "--daemon", dest="daemon",
help="go into daemon mode (implies --loop)",
action="store_true", default=arg_defaults["daemon"])
parser.add_argument("--debug", dest="debug",
help="increase logging level to DEBUG (DEPRECATED, please use -vvv)",
action="store_true", default=arg_defaults["debug"])
parser.add_argument("--loop", dest="loop",
help="loop forever (default is to update once)",
action="store_true", default=arg_defaults["loop"])
parser.add_argument("--sleeptime", dest="sleeptime",
help="how long to sleep between checks in seconds",
default=arg_defaults["sleeptime"])
parser.add_argument("--version", dest="version",
help="show version and exit",
action="store_true", default=arg_defaults["version"])
parser.add_argument("-v", "--verbose", dest="verbose_count",
action="count", default=arg_defaults["verbose_count"],
help="increases log verbosity for each occurrence")
return parser, arg_defaults | [
"def",
"create_argparser",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"arg_defaults",
"=",
"{",
"\"daemon\"",
":",
"False",
",",
"\"loop\"",
":",
"False",
",",
"\"listpresets\"",
":",
"False",
",",
"\"config\"",
":",
"None",
... | Instantiate an `argparse.ArgumentParser`.
Adds all basic cli options including default values. | [
"Instantiate",
"an",
"argparse",
".",
"ArgumentParser",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/cli.py#L36-L78 | train | 44,194 |
infothrill/python-dyndnsc | dyndnsc/cli.py | run_forever | def run_forever(dyndnsclients):
"""
Run an endless loop accross the give dynamic dns clients.
:param dyndnsclients: list of DynDnsClients
"""
while True:
try:
# Do small sleeps in the main loop, needs_check() is cheap and does
# the rest.
time.sleep(15)
for dyndnsclient in dyndnsclients:
dyndnsclient.check()
except (KeyboardInterrupt,):
break
except (Exception,) as exc:
LOG.critical("An exception occurred in the dyndns loop", exc_info=exc)
return 0 | python | def run_forever(dyndnsclients):
"""
Run an endless loop accross the give dynamic dns clients.
:param dyndnsclients: list of DynDnsClients
"""
while True:
try:
# Do small sleeps in the main loop, needs_check() is cheap and does
# the rest.
time.sleep(15)
for dyndnsclient in dyndnsclients:
dyndnsclient.check()
except (KeyboardInterrupt,):
break
except (Exception,) as exc:
LOG.critical("An exception occurred in the dyndns loop", exc_info=exc)
return 0 | [
"def",
"run_forever",
"(",
"dyndnsclients",
")",
":",
"while",
"True",
":",
"try",
":",
"# Do small sleeps in the main loop, needs_check() is cheap and does",
"# the rest.",
"time",
".",
"sleep",
"(",
"15",
")",
"for",
"dyndnsclient",
"in",
"dyndnsclients",
":",
"dynd... | Run an endless loop accross the give dynamic dns clients.
:param dyndnsclients: list of DynDnsClients | [
"Run",
"an",
"endless",
"loop",
"accross",
"the",
"give",
"dynamic",
"dns",
"clients",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/cli.py#L81-L98 | train | 44,195 |
infothrill/python-dyndnsc | dyndnsc/cli.py | main | def main():
"""
Run the main CLI program.
Initializes the stack, parses command line arguments, and fires requested
logic.
"""
plugins = DefaultPluginManager()
plugins.load_plugins()
parser, _ = create_argparser()
# add the updater protocol options to the CLI:
for kls in updater_classes():
kls.register_arguments(parser)
for kls in detector_classes():
kls.register_arguments(parser)
# add the plugin options to the CLI:
from os import environ
plugins.options(parser, environ)
args = parser.parse_args()
if args.debug:
args.verbose_count = 5 # some high number
log_level = max(int(logging.WARNING / 10) - args.verbose_count, 0) * 10
# print(log_level)
logging.basicConfig(level=log_level, format="%(levelname)s %(message)s")
# logging.debug("args %r", args)
if args.version:
from . import __version__
print("dyndnsc %s" % __version__) # noqa
return 0
# silence 'requests' logging
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.WARNING)
logging.debug(parser)
cfg = get_configuration(args.config)
if args.listpresets:
list_presets(cfg)
return 0
if args.config:
collected_configs = collect_config(cfg)
else:
parsed_args = parse_cmdline_args(args, updater_classes().union(detector_classes()))
logging.debug("parsed_args %r", parsed_args)
collected_configs = {
"cmdline": {
"interval": int(args.sleeptime)
}
}
collected_configs["cmdline"].update(parsed_args)
plugins.configure(args)
plugins.initialize()
logging.debug("collected_configs: %r", collected_configs)
dyndnsclients = []
for thisconfig in collected_configs:
logging.debug("Initializing client for '%s'", thisconfig)
# done with options, bring on the dancing girls
dyndnsclient = getDynDnsClientForConfig(
collected_configs[thisconfig], plugins=plugins)
if dyndnsclient is None:
return 1
# do an initial synchronization, before going into endless loop:
dyndnsclient.sync()
dyndnsclients.append(dyndnsclient)
run_forever_callable = partial(run_forever, dyndnsclients)
if args.daemon:
import daemonocle
daemon = daemonocle.Daemon(worker=run_forever_callable)
daemon.do_action("start")
args.loop = True
if args.loop:
run_forever_callable()
def main():
    """
    Run the main CLI program.

    Initializes the stack, parses command line arguments, and fires requested
    logic.
    """
    plugins = DefaultPluginManager()
    plugins.load_plugins()
    parser, _ = create_argparser()
    # Let every updater/detector protocol contribute its own CLI options.
    for protocol_cls in updater_classes():
        protocol_cls.register_arguments(parser)
    for protocol_cls in detector_classes():
        protocol_cls.register_arguments(parser)
    # Plugins may extend the CLI as well:
    from os import environ
    plugins.options(parser, environ)
    args = parser.parse_args()

    if args.debug:
        args.verbose_count = 5  # some high number
    # Each -v lowers the threshold by one level (10 units) below WARNING.
    log_level = max(int(logging.WARNING / 10) - args.verbose_count, 0) * 10
    logging.basicConfig(level=log_level, format="%(levelname)s %(message)s")

    if args.version:
        from . import __version__
        print("dyndnsc %s" % __version__)  # noqa
        return 0

    # silence 'requests' logging
    logging.getLogger("requests").setLevel(logging.WARNING)

    logging.debug(parser)
    cfg = get_configuration(args.config)

    if args.listpresets:
        list_presets(cfg)
        return 0

    if args.config:
        collected_configs = collect_config(cfg)
    else:
        parsed_args = parse_cmdline_args(args, updater_classes().union(detector_classes()))
        logging.debug("parsed_args %r", parsed_args)
        collected_configs = {
            "cmdline": {
                "interval": int(args.sleeptime)
            }
        }
        collected_configs["cmdline"].update(parsed_args)

    plugins.configure(args)
    plugins.initialize()
    logging.debug("collected_configs: %r", collected_configs)

    dyndnsclients = []
    for config_name in collected_configs:
        logging.debug("Initializing client for '%s'", config_name)
        # done with options, bring on the dancing girls
        client = getDynDnsClientForConfig(
            collected_configs[config_name], plugins=plugins)
        if client is None:
            return 1
        # do an initial synchronization, before going into endless loop:
        client.sync()
        dyndnsclients.append(client)

    run_forever_callable = partial(run_forever, dyndnsclients)
    if args.daemon:
        import daemonocle
        daemonocle.Daemon(worker=run_forever_callable).do_action("start")
        args.loop = True
    if args.loop:
        run_forever_callable()
    return 0
"def",
"main",
"(",
")",
":",
"plugins",
"=",
"DefaultPluginManager",
"(",
")",
"plugins",
".",
"load_plugins",
"(",
")",
"parser",
",",
"_",
"=",
"create_argparser",
"(",
")",
"# add the updater protocol options to the CLI:",
"for",
"kls",
"in",
"updater_classes"... | Run the main CLI program.
Initializes the stack, parses command line arguments, and fires requested
logic. | [
"Run",
"the",
"main",
"CLI",
"program",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/cli.py#L101-L189 | train | 44,196 |
infothrill/python-dyndnsc | dyndnsc/conf.py | get_configuration | def get_configuration(config_file=None):
"""Return an initialized ConfigParser.
If no config filename is presented, `DEFAULT_USER_INI` is used if present.
Also reads the built-in presets.
:param config_file: string path
"""
parser = configparser.ConfigParser()
if config_file is None:
# fallback to default user config file
config_file = os.path.join(os.getenv("HOME"), DEFAULT_USER_INI)
if not os.path.isfile(config_file):
config_file = None
else:
if not os.path.isfile(config_file):
raise ValueError("%s is not a file" % config_file)
configs = [get_filename(PRESETS_INI)]
if config_file:
configs.append(config_file)
LOG.debug("Attempting to read configuration from %r", configs)
read_configs = parser.read(configs)
LOG.debug("Successfully read configuration from %r", read_configs)
LOG.debug("config file sections: %r", parser.sections())
def get_configuration(config_file=None):
    """Return an initialized ConfigParser.

    If no config filename is presented, `DEFAULT_USER_INI` is used if present.
    Also reads the built-in presets.

    :param config_file: string path
    :raises ValueError: if an explicitly given config_file is not a file
    """
    parser = configparser.ConfigParser()
    if config_file is None:
        # Fall back to the default per-user config file, if it exists.
        # os.path.expanduser("~") honours $HOME but also works when HOME is
        # unset (e.g. on Windows), whereas os.getenv("HOME") would return
        # None and make os.path.join() raise TypeError.
        config_file = os.path.join(os.path.expanduser("~"), DEFAULT_USER_INI)
        if not os.path.isfile(config_file):
            config_file = None
    else:
        if not os.path.isfile(config_file):
            raise ValueError("%s is not a file" % config_file)
    # Built-in presets are read first, so user config can override them.
    configs = [get_filename(PRESETS_INI)]
    if config_file:
        configs.append(config_file)
    LOG.debug("Attempting to read configuration from %r", configs)
    read_configs = parser.read(configs)
    LOG.debug("Successfully read configuration from %r", read_configs)
    LOG.debug("config file sections: %r", parser.sections())
    return parser
"def",
"get_configuration",
"(",
"config_file",
"=",
"None",
")",
":",
"parser",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"if",
"config_file",
"is",
"None",
":",
"# fallback to default user config file",
"config_file",
"=",
"os",
".",
"path",
".",
"jo... | Return an initialized ConfigParser.
If no config filename is presented, `DEFAULT_USER_INI` is used if present.
Also reads the built-in presets.
:param config_file: string path | [
"Return",
"an",
"initialized",
"ConfigParser",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/conf.py#L20-L45 | train | 44,197 |
infothrill/python-dyndnsc | dyndnsc/conf.py | collect_config | def collect_config(cfg):
"""
Construct configuration dictionary from configparser.
Resolves presets and returns a dictionary containing:
.. code-block:: bash
{
"client_name": {
"detector": ("detector_name", detector_opts),
"updater": [
("updater_name", updater_opts),
...
]
},
...
}
:param cfg: ConfigParser
"""
collected_configs = {}
_updater_str = "updater"
_detector_str = "detector"
_dash = "-"
for client_name, client_cfg_dict in _iraw_client_configs(cfg):
detector_name = None
detector_options = {}
updater_name = None
updater_options = {}
collected_config = {}
for k in client_cfg_dict:
if k.startswith(_detector_str + _dash):
detector_options[
k.replace(_detector_str + _dash, "")] = client_cfg_dict[k]
elif k == _updater_str:
updater_name = client_cfg_dict.get(k)
elif k == _detector_str:
detector_name = client_cfg_dict.get(k)
elif k.startswith(_updater_str + _dash):
updater_options[
k.replace(_updater_str + _dash, "")] = client_cfg_dict[k]
else:
# options passed "as is" to the dyndnsc client
collected_config[k] = client_cfg_dict[k]
collected_config[_detector_str] = [(detector_name, detector_options)]
collected_config[_updater_str] = [(updater_name, updater_options)]
collected_configs[client_name] = collected_config
def collect_config(cfg):
    """
    Construct configuration dictionary from configparser.

    Resolves presets and returns a dictionary containing:

    .. code-block:: bash

        {
            "client_name": {
                "detector": ("detector_name", detector_opts),
                "updater": [
                    ("updater_name", updater_opts),
                    ...
                ]
            },
            ...
        }

    :param cfg: ConfigParser
    """
    updater_key = "updater"
    detector_key = "detector"
    updater_prefix = updater_key + "-"
    detector_prefix = detector_key + "-"
    collected_configs = {}
    for client_name, client_cfg_dict in _iraw_client_configs(cfg):
        detector_name = None
        updater_name = None
        detector_options = {}
        updater_options = {}
        client_config = {}
        for option in client_cfg_dict:
            value = client_cfg_dict.get(option)
            if option.startswith(detector_prefix):
                detector_options[option.replace(detector_prefix, "")] = value
            elif option == updater_key:
                updater_name = value
            elif option == detector_key:
                detector_name = value
            elif option.startswith(updater_prefix):
                updater_options[option.replace(updater_prefix, "")] = value
            else:
                # options passed "as is" to the dyndnsc client
                client_config[option] = value
        client_config[detector_key] = [(detector_name, detector_options)]
        client_config[updater_key] = [(updater_name, updater_options)]
        collected_configs[client_name] = client_config
    return collected_configs
"def",
"collect_config",
"(",
"cfg",
")",
":",
"collected_configs",
"=",
"{",
"}",
"_updater_str",
"=",
"\"updater\"",
"_detector_str",
"=",
"\"detector\"",
"_dash",
"=",
"\"-\"",
"for",
"client_name",
",",
"client_cfg_dict",
"in",
"_iraw_client_configs",
"(",
"cf... | Construct configuration dictionary from configparser.
Resolves presets and returns a dictionary containing:
.. code-block:: bash
{
"client_name": {
"detector": ("detector_name", detector_opts),
"updater": [
("updater_name", updater_opts),
...
]
},
...
}
:param cfg: ConfigParser | [
"Construct",
"configuration",
"dictionary",
"from",
"configparser",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/conf.py#L76-L126 | train | 44,198 |
infothrill/python-dyndnsc | dyndnsc/detector/command.py | IPDetector_Command.detect | def detect(self):
"""Detect and return the IP address."""
if PY3: # py23
import subprocess # noqa: S404 @UnresolvedImport pylint: disable=import-error
else:
import commands as subprocess # @UnresolvedImport pylint: disable=import-error
try:
theip = subprocess.getoutput(self.opts_command) # noqa: S605
except Exception:
theip = None
self.set_current_value(theip)
def detect(self):
    """Detect and return the IP address."""
    # The shell-out helper lives in different modules on py2 vs py3.
    if PY3:  # py23
        import subprocess  # noqa: S404 @UnresolvedImport pylint: disable=import-error
        run_command = subprocess.getoutput
    else:
        import commands  # @UnresolvedImport pylint: disable=import-error
        run_command = commands.getoutput
    try:
        detected_ip = run_command(self.opts_command)  # noqa: S605
    except Exception:
        # Best effort: any failure of the user-supplied command means "no IP".
        detected_ip = None
    self.set_current_value(detected_ip)
    return detected_ip
"def",
"detect",
"(",
"self",
")",
":",
"if",
"PY3",
":",
"# py23",
"import",
"subprocess",
"# noqa: S404 @UnresolvedImport pylint: disable=import-error",
"else",
":",
"import",
"commands",
"as",
"subprocess",
"# @UnresolvedImport pylint: disable=import-error",
"try",
":",
... | Detect and return the IP address. | [
"Detect",
"and",
"return",
"the",
"IP",
"address",
"."
] | 2196d48aa6098da9835a7611fbdb0b5f0fbf51e4 | https://github.com/infothrill/python-dyndnsc/blob/2196d48aa6098da9835a7611fbdb0b5f0fbf51e4/dyndnsc/detector/command.py#L35-L46 | train | 44,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.