_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
def release(version):
    """Tag all submodules for a new release.

    Verifies that the proposed version is a strict increment over the current
    one (per PEP 440), writes it to each submodule's version.py, commits, and
    creates a git tag. Fails before mutating anything if the version is not a
    valid increment.
    """
    # Validation first, then the mutating steps, in order.
    for step in (check_new_version, set_new_version, commit_new_version, set_git_tag):
        step(version)
def passthrough_context_definition(context_params):
    '''Create a context definition from a pre-existing context.

    Useful in testing contexts where you may want to create a context manually
    and then pass it into a one-off PipelineDefinition.

    Args:
        context_params (ExecutionContext): The context that will be provided
            to the pipeline.

    Returns:
        Dict[str, PipelineContextDefinition]: The passthrough context
        definition keyed under DEFAULT_CONTEXT_NAME. (The original docstring
        claimed a bare PipelineContextDefinition was returned.)
    '''
    # Fix: report the actual parameter name in check failures (was 'context').
    check.inst_param(context_params, 'context_params', ExecutionContext)
    context_definition = PipelineContextDefinition(
        context_fn=lambda *_args: context_params
    )
    return {DEFAULT_CONTEXT_NAME: context_definition}
def input_selector_schema(config_cls):
    '''Decorator: build an input schema from a function that maps the selected
    key/value of a ``config_value`` to an instance of a custom type.

    Args:
        config_cls (Selector)
    '''
    config_type = resolve_config_cls_arg(config_cls)
    check.param_invariant(config_type.is_selector, 'config_cls')

    def _wrap(func):
        def _selector(context, config_value):
            # A selector config has exactly one key/value pair selected.
            key, value = single_item(config_value)
            return func(context, key, value)

        return _create_input_schema(config_type, _selector)

    return _wrap
def output_selector_schema(config_cls):
    '''Decorator for annotating a function that takes the selected properties
    of a ``config_value`` plus an instance of a custom type and materializes
    it.

    Args:
        config_cls (Selector):
    '''
    config_type = resolve_config_cls_arg(config_cls)
    check.param_invariant(config_type.is_selector, 'config_cls')

    def _wrap(func):
        def _selector(context, config_value, runtime_value):
            # A selector config has exactly one key/value pair selected.
            key, value = single_item(config_value)
            return func(context, key, value, runtime_value)

        return _create_output_schema(config_type, _selector)

    return _wrap
def block(self, text, prefix=''):
    '''Wrap ``text`` to the printer's effective line length and emit each
    resulting line via ``self.line``, prefixing every line with ``prefix``.'''
    # The wrap width shrinks by the current indent so the final output
    # (indent + wrapped line) stays within line_length.
    width = self.line_length - len(self.current_indent_str)
    wrapper = TextWrapper(
        width=width,
        initial_indent=prefix,
        subsequent_indent=prefix,
        break_long_words=False,
        break_on_hyphens=False,
    )
    for wrapped_line in wrapper.wrap(text):
        self.line(wrapped_line)
def download_from_s3(context):
    '''Download an object from S3 via the configured download manager.

    Args:
        context: Must expose ``solid_config['target_file']`` and a
            ``download_manager`` resource.

    Returns:
        str: The path to the downloaded object.
    '''
    target_file = context.solid_config['target_file']
    manager = context.resources.download_manager
    return manager.download_file_contents(context, target_file)
def upload_to_s3(context, file_obj):
    '''Upload a file to S3.

    Args:
        context: Must expose a boto3 S3 client as its ``s3`` resource and
            provide ``bucket``/``key`` (and optional ``kwargs``) in
            solid_config.
        file_obj: A readable file-like object whose contents are uploaded.

    Yields:
        Result: the bucket, then the key, to which the file was uploaded.
    '''
    cfg = context.solid_config
    bucket = cfg['bucket']
    key = cfg['key']
    # Any extra put_object options ride along via the optional 'kwargs' entry.
    extra_kwargs = cfg.get('kwargs') or {}
    context.resources.s3.put_object(
        Bucket=bucket, Body=file_obj.read(), Key=key, **extra_kwargs
    )
    yield Result(bucket, 'bucket')
    yield Result(key, 'key')
def user_code_error_boundary(error_cls, msg, **kwargs):
    '''
    Wrap the execution of user-space code in an error boundary.

    Places a uniform policy around user code invoked by the framework: all
    user errors are wrapped in a DagsterUserCodeExecutionError while the
    original stack trace is preserved, so a tool author can report it without
    framework frames in the way. Framework-contract errors (DagsterError)
    propagate unchanged. Especially helpful in a notebooking context.
    '''
    check.str_param(msg, 'msg')
    check.subclass_param(error_cls, 'error_cls', DagsterUserCodeExecutionError)
    try:
        yield
    except DagsterError:
        # The system has thrown an error that is part of the user-framework
        # contract -- let it propagate untouched.
        raise
    except Exception as user_error:  # pylint: disable=W0703
        # User code failed: wrap it, keeping the original exc_info so the
        # real traceback survives the re-raise.
        raise_from(
            error_cls(
                msg,
                user_exception=user_error,
                original_exc_info=sys.exc_info(),
                **kwargs
            ),
            user_error,
        )
def mkdir_p(newdir, mode=0o777):
    """The missing ``mkdir -p`` functionality in os."""
    try:
        os.makedirs(newdir, mode)
    except OSError as err:
        # Swallow the error only when the path already exists as a directory;
        # anything else (including EEXIST on a non-directory) is re-raised.
        if err.errno == errno.EEXIST and os.path.isdir(newdir):
            return
        raise
def user_code_context_manager(user_fn, error_cls, msg):
    '''Wrap a user-provided function that may yield or return a value,
    producing a generator that asserts exactly one value is yielded.
    '''
    check.callable_param(user_fn, 'user_fn')
    check.subclass_param(error_cls, 'error_cls', DagsterUserCodeExecutionError)
    with user_code_error_boundary(error_cls, msg):
        # Normalize a plain return value into a one-item generator.
        gen = _ensure_gen(user_fn())
        try:
            value = next(gen)
        except StopIteration:
            check.failed('Must yield one item. You did not yield anything.')
        yield value
        # After control comes back, the user generator must be exhausted --
        # yielding a second item is a contract violation.
        exhausted = False
        try:
            next(gen)
        except StopIteration:
            exhausted = True
        check.invariant(exhausted, 'Must yield one item. Yielded more than one item')
def _create_context_free_log(run_config, pipeline_def):
    '''Build a DagsterLog without depending on an ExecutionContext, so that
    pipeline initialization failures can still be logged.
    '''
    check.inst_param(run_config, 'run_config', RunConfig)
    check.inst_param(pipeline_def, 'pipeline_def', PipelineDefinition)
    # Always include the default colored console logger.
    loggers = [define_colored_console_logger('dagster')]
    # The event callback takes precedence over any explicitly supplied loggers.
    if run_config.event_callback:
        loggers.append(construct_event_logger(run_config.event_callback))
    elif run_config.loggers:
        loggers.extend(run_config.loggers)
    return DagsterLog(
        run_config.run_id, get_logging_tags(None, run_config, pipeline_def), loggers
    )
def success(self):
    '''Whether the solid execution was successful: no step failed and at
    least one step succeeded.'''
    events = itertools.chain(
        self.input_expectations, self.output_expectations, self.transforms
    )
    saw_success = False
    for event in events:
        # Any failure makes the whole solid unsuccessful.
        if event.event_type == DagsterEventType.STEP_FAILURE:
            return False
        if event.event_type == DagsterEventType.STEP_SUCCESS:
            saw_success = True
    return saw_success
def skipped(self):
    '''Whether the solid execution was skipped (every step event is a skip).

    Note: vacuously True when there are no step events at all, matching the
    original semantics of ``all`` over an empty sequence.
    '''
    # Use a generator instead of materializing a list so all() can
    # short-circuit on the first non-skip event (flake8-comprehensions C419).
    return all(
        step_event.event_type == DagsterEventType.STEP_SKIPPED
        for step_event in itertools.chain(
            self.input_expectations, self.output_expectations, self.transforms
        )
    )
def transformed_values(self):
    '''Return a dict of transformed results keyed by output name, or None if
    the execution result isn't a success. Reconstructs the pipeline context
    to materialize values.
    '''
    if not (self.success and self.transforms):
        return None
    with self.reconstruct_context() as context:
        return {
            transform.step_output_data.output_name: self._get_value(
                context, transform.step_output_data
            )
            for transform in self.transforms
            if transform.is_successful_output
        }
def transformed_value(self, output_name=DEFAULT_OUTPUT):
    '''Return the transformed value for ``output_name`` (DEFAULT_OUTPUT if
    not given), or None if the execution result isn't a success.
    Reconstructs the pipeline context to materialize the value.

    Raises:
        DagsterInvariantViolationError: If the output is not defined on the
            solid, or was not produced by a successful execution.
    '''
    check.str_param(output_name, 'output_name')
    if not self.solid.definition.has_output(output_name):
        raise DagsterInvariantViolationError(
            '{output_name} not defined in solid {solid}'.format(
                output_name=output_name, solid=self.solid.name
            )
        )
    if not self.success:
        return None
    for result in self.transforms:
        is_match = (
            result.is_successful_output
            and result.step_output_data.output_name == output_name
        )
        if is_match:
            with self.reconstruct_context() as context:
                return self._get_value(context, result.step_output_data)
    # Success, but the requested output never fired.
    raise DagsterInvariantViolationError(
        (
            'Did not find result {output_name} in solid {self.solid.name} '
            'execution result'
        ).format(output_name=output_name, self=self)
    )
def failure_data(self):
    '''Return the failing step's data from this solid's execution, if any
    (None otherwise).'''
    for event in itertools.chain(
        self.input_expectations, self.output_expectations, self.transforms
    ):
        if event.event_type == DagsterEventType.STEP_FAILURE:
            return event.step_failure_data
    # No failure occurred; explicit None matches the original fall-through.
    return None
def PermissiveDict(fields=None):
    '''A permissive dict type: any fields that are declared here are type
    checked when passed in; other fields are permitted but ignored by the
    type checker.
    '''
    if fields:
        check_user_facing_fields_dict(fields, 'PermissiveDict')

    class _PermissiveDict(_ConfigComposite):
        def __init__(self):
            # Each instantiation gets a unique key via the global counter.
            super(_PermissiveDict, self).__init__(
                name=None,
                key='PermissiveDict.' + str(DictCounter.get_next_count()),
                fields=fields or dict(),
                description='A configuration dictionary with typed fields',
                type_attributes=ConfigTypeAttributes(is_builtin=True),
            )

        @property
        def is_permissive_composite(self):
            return True

    return _PermissiveDict
def _is_valid_dataset(config_value):
    '''Datasets must be of form "project.dataset" or "dataset".'''
    # Accept either a project-qualified dataset or a bare dataset name.
    pattern = r'^' + RE_PROJECT + r'\.' + RE_DS_TABLE + r'$|^' + RE_DS_TABLE + r'$'
    return re.match(pattern, config_value)
def _is_valid_table(config_value):
    '''Tables must be of form "project.dataset.table" or "dataset.table".'''
    # Build the two accepted shapes, then match either anchored alternative.
    qualified = RE_PROJECT + r'\.' + RE_DS_TABLE + r'\.' + RE_DS_TABLE
    bare = RE_DS_TABLE + r'\.' + RE_DS_TABLE
    return re.match(r'^' + qualified + r'$|^' + bare + r'$', config_value)
def _execute_core_transform(transform_context, inputs):
    '''
    Execute the user-specified transform for the solid, yielding each step
    output, and log any declared outputs the solid did not emit.
    '''
    check.inst_param(transform_context, 'transform_context', SystemTransformExecutionContext)
    check.dict_param(inputs, 'inputs', key_type=str)
    solid = transform_context.step.solid
    transform_context.log.debug(
        'Executing core transform for solid {solid}.'.format(solid=solid.name)
    )
    emitted = []
    for step_output in _yield_transform_results(transform_context, inputs):
        yield step_output
        # Track only actual output values; other step events pass through.
        if isinstance(step_output, StepOutputValue):
            emitted.append(step_output)
    if len(emitted) != len(solid.definition.output_defs):
        declared_names = {output_def.name for output_def in solid.definition.output_defs}
        omitted = declared_names.difference({result.output_name for result in emitted})
        transform_context.log.info(
            'Solid {solid} did not fire outputs {outputs}'.format(
                solid=solid.name, outputs=repr(omitted)
            )
        )
def as_dagster_type(
    existing_type,
    name=None,
    description=None,
    input_schema=None,
    output_schema=None,
    serialization_strategy=None,
    storage_plugins=None,
):
    '''
    Project an existing Python class into the Dagster type system.

    Args:
        existing_type (cls): The Python type to project into the Dagster
            type system.
        name (Optional[str]): Defaults to ``existing_type.__name__``.
        description (Optional[str]):
        input_schema (Optional[InputSchema]): An InputSchema instance that
            can map config data to a value of this type.
        output_schema (Optional[OutputSchema]): An OutputSchema instance that
            can map config data to persisting values of this type.
        serialization_strategy (Optional[SerializationStrategy]): Default
            behavior for serializing values between execution steps; pickle
            serialization is used when not supplied.
        storage_plugins (Optional[Dict[RunStorageMode, TypeStoragePlugin]]):
            Storage-specific overrides for the serialization strategy,
            allowing optimizations such as efficient distributed storage
            on S3.
    '''
    check.type_param(existing_type, 'existing_type')
    check.opt_str_param(name, 'name')
    check.opt_str_param(description, 'description')
    check.opt_inst_param(input_schema, 'input_schema', InputSchema)
    check.opt_inst_param(output_schema, 'output_schema', OutputSchema)
    check.opt_inst_param(serialization_strategy, 'serialization_strategy', SerializationStrategy)
    storage_plugins = check.opt_dict_param(storage_plugins, 'storage_plugins')
    # Fall back to pickle-based serialization when none was provided.
    if serialization_strategy is None:
        serialization_strategy = PickleSerializationStrategy()
    resolved_name = existing_type.__name__ if name is None else name
    return _decorate_as_dagster_type(
        existing_type,
        key=resolved_name,
        name=resolved_name,
        description=description,
        input_schema=input_schema,
        output_schema=output_schema,
        serialization_strategy=serialization_strategy,
        storage_plugins=storage_plugins,
    )
def resource(config_field=None, description=None):
    '''Decorator for creating a resource: the decorated function becomes the
    resource_fn of a ResourceDefinition.
    '''
    # Bare usage (@resource rather than @resource(...)): the first positional
    # "config_field" is actually the decorated function itself.
    if callable(config_field):
        return ResourceDefinition(resource_fn=config_field)

    def _wrap(resource_fn):
        return ResourceDefinition(resource_fn, config_field, description)

    return _wrap
def EventV2_create(
    self,
    summary,
    source,
    severity,
    event_action='trigger',
    dedup_key=None,
    timestamp=None,
    component=None,
    group=None,
    event_class=None,
    custom_details=None,
):
    '''Send an event through the PagerDuty Events API v2.

    Arguments:
        summary {string} -- High-level text summary of the event; used to
            construct the alert's description (e.g. "PING OK - Packet loss =
            0%, RTA = 1.41 ms").
        source {string} -- Human-readable unique identifier of the system
            having the problem (hostname, IP address, resource ARN, ...).
        severity {string} -- How impacted the affected system is; one of
            {info, warning, error, critical}.

    Keyword Arguments:
        event_action {str} -- 'trigger' (default) opens a new alert or adds a
            trigger log entry to an existing one (matched by dedup_key);
            'acknowledge' marks the referenced incident as being worked on;
            'resolve' closes it (subsequent triggers with the same dedup_key
            open a new incident).
        dedup_key {string} -- Deduplication key correlating triggers with
            acknowledges/resolves; at most 255 characters.
        timestamp {string} -- ISO 8601 time the upstream system detected the
            event (e.g. 2015-07-17T08:42:58.315+0000); auto-generated by
            PagerDuty when omitted.
        component {string} -- Affected part of the system (e.g. "mysql").
        group {string} -- Cluster or grouping of sources (e.g.
            "prod-datapipe").
        event_class {string} -- Class/type of the event (e.g. "High CPU",
            "Latency", "500 Error").
        custom_details {Dict[str, str]} -- Additional details about the event
            and affected system (e.g. {"ping time": "1500ms"}).
    '''
    payload = {'summary': summary, 'source': source, 'severity': severity}
    # Optional payload members are only included when actually provided.
    optional_payload = {
        'timestamp': timestamp,
        'component': component,
        'group': group,
        'class': event_class,
        'custom_details': custom_details,
    }
    payload.update(
        {key: value for key, value in optional_payload.items() if value is not None}
    )
    data = {
        'routing_key': self.routing_key,
        'event_action': event_action,
        'payload': payload,
    }
    if dedup_key is not None:
        data['dedup_key'] = dedup_key
    return pypd.EventV2.create(data=data)
def coalesce_execution_steps(execution_plan):
    '''Group execution steps by solid, in topological order of the solids.'''
    solid_order = _coalesce_solid_order(execution_plan)
    steps_by_solid = defaultdict(list)
    # groupby over the topologically sorted steps keeps per-solid step order.
    for solid_name, solid_steps in itertools.groupby(
        execution_plan.topological_steps(), lambda step: step.solid_name
    ):
        steps_by_solid[solid_name].extend(solid_steps)
    return OrderedDict(
        (solid_name, steps_by_solid[solid_name]) for solid_name in solid_order
    )
def get_connection_params(self):
    """
    Acquire database connection parameters from ``settings.py`` values,
    mapping Django setting names onto client keyword names and applying
    defaults for anything left blank.
    """
    setting_to_kwarg = {
        'NAME': 'name',
        'HOST': 'host',
        'PORT': 'port',
        'USER': 'username',
        'PASSWORD': 'password',
        'AUTH_SOURCE': 'authSource',
        'AUTH_MECHANISM': 'authMechanism',
        'ENFORCE_SCHEMA': 'enforce_schema',
        'REPLICASET': 'replicaset',
        'SSL': 'ssl',
        'SSL_CERTFILE': 'ssl_certfile',
        'SSL_CA_CERTS': 'ssl_ca_certs',
        'READ_PREFERENCE': 'read_preference'
    }
    params = {
        'name': 'djongo_test',
        'enforce_schema': True
    }
    for setting_name, kwarg in setting_to_kwarg.items():
        if setting_name not in self.settings_dict:
            continue
        value = self.settings_dict[setting_name]
        # Keep truthy values, plus an explicit False (e.g. SSL/ENFORCE_SCHEMA
        # switched off); blank strings / None / 0 fall back to defaults.
        if value or value is False:
            params[kwarg] = value
    return params
def get_new_connection(self, connection_params):
    """
    Open a connection using the dictionary produced by
    ``get_connection_params`` and return the database handle.

    TODO: This needs to be made more generic to accept other MongoClient
    parameters.
    """
    name = connection_params.pop('name')
    enforce_schema = connection_params.pop('enforce_schema')
    connection_params['document_class'] = OrderedDict
    # connection_params['tz_aware'] = True
    # Close any previous client before opening a new one, to avoid leaving
    # unclosed connections behind.
    if self.client_connection is not None:
        self.client_connection.close()
    self.client_connection = Database.connect(**connection_params)
    database = self.client_connection[name]
    self.djongo_connection = DjongoClient(database, enforce_schema)
    return self.client_connection[name]
def create_cursor(self, name=None):
    """
    Return an active connection cursor to the database.
    """
    cursor = Cursor(self.client_connection, self.connection, self.djongo_connection)
    return cursor
def _close(self):
    """
    Close the client connection to the database (no-op when there is no
    active connection).
    """
    if not self.connection:
        return
    with self.wrap_database_errors:
        self.connection.client.close()
def make_mdl(model, model_dict):
    """
    Build an instance of ``model`` from ``model_dict``.

    Each raw value is deserialized through its field's ``to_python``.
    Fix: the original mutated the caller's ``model_dict`` in place while
    converting values; we now build a fresh dict and leave the input intact.
    """
    converted = {
        field_name: model._meta.get_field(field_name).to_python(raw_value)
        for field_name, raw_value in model_dict.items()
    }
    return model(**converted)
def to_python(self, value):
    """
    Translate a Mongo array (list of dicts) into a list of
    ``model_container`` instances; None passes through.
    """
    if value is None:
        return value
    assert isinstance(value, list)
    # Items may already be model instances (e.g. round-tripped values);
    # only raw dicts need conversion.
    return [
        item
        if isinstance(item, self.model_container)
        else make_mdl(self.model_container, item)
        for item in value
    ]
def formfield(self, **kwargs):
    """
    Return the form field for the array.
    """
    # Caller-supplied kwargs override these defaults.
    options = dict(
        form_class=ArrayFormField,
        model_container=self.model_container,
        model_form_class=self.model_form_class,
        name=self.attname,
        mdl_form_kw_l=self.model_form_kwargs_l,
    )
    options.update(kwargs)
    return super().formfield(**options)
def to_python(self, value):
    """
    Translate a stored dict into a ``model_container`` instance; None and
    already-built instances pass through unchanged.
    """
    if value is None or isinstance(value, self.model_container):
        return value
    assert isinstance(value, dict)
    return make_mdl(self.model_container, value)
def _apply_rel_filters(self, queryset):
    """
    Filter the queryset down to the instance this manager is bound to.
    """
    queryset._add_hints(instance=self.instance)
    if self._db:
        queryset = queryset.using(self._db)
    return queryset.filter(**self.core_filters)
def _compute_nfps_uniform(cum_counts, sizes):
    """Compute the matrix of expected false positives for every sub-interval
    of the set-size domain, assuming a uniform distribution of set sizes
    within each sub-interval.

    Args:
        cum_counts: the complete cumulative distribution of set sizes.
        sizes: the complete domain of set sizes.

    Return (np.array): 2-D array of expected false positives for every
        [l, u] interval; l is axis-0, u is axis-1.
    """
    n = len(sizes)
    nfps = np.zeros((n, n))
    # l and u are inclusive bounds; only the upper triangle is meaningful.
    for lower in range(n):
        for upper in range(lower, n):
            nfps[lower, upper] = _compute_nfp_uniform(lower, upper, cum_counts, sizes)
    return nfps
def _compute_nfps_real(counts, sizes):
    """Compute the matrix of expected false positives for every sub-interval
    of the set-size domain, using the real distribution of set sizes.

    Args:
        counts: the complete distribution of set sizes.
        sizes: the complete domain of set sizes.

    Return (np.array): 2-D array of expected false positives for every
        [l, u] interval; l is axis-0, u is axis-1.
    """
    n = len(sizes)
    nfps = np.zeros((n, n))
    # l and u are inclusive bounds; only the upper triangle is meaningful.
    for lower in range(n):
        for upper in range(lower, n):
            nfps[lower, upper] = _compute_nfp_real(lower, upper, counts, sizes)
    return nfps
q268735 | _compute_best_partitions | test | def _compute_best_partitions(num_part, sizes, nfps):
"""Computes the optimal partitions given the size distributions
and computed number of expected false positives for all sub-intervals.
Args:
num_part (int): The number of partitions to create.
sizes (numpy.array): The complete domain of set sizes in sorted order.
nfps (numpy.array): The computed number of expected false positives
for all sub-intervals; axis-0 is for the indexes of lower bounds and
axis-1 is for the indexes of upper bounds.
Returns:
partitions (list): list of lower and upper bounds of set sizes for
all partitions.
total_nfps (float): total number of expected false positives from all
partitions.
cost (numpy.array): a N x p-1 matrix of the computed optimal NFPs for
all sub-problems given upper bound set size and number of partitions.
"""
if num_part < 2:
raise ValueError("num_part cannot be less than 2")
if num_part > len(sizes):
raise ValueError("num_part cannot be greater than the domain size of "
"all set sizes")
# If number of partitions is 2, then simply find the upper bound
# of the first partition.
if num_part == 2:
total_nfps, u = min((nfps[0, u1]+nfps[u1+1, len(sizes)-1], u1)
for u1 in range(0, len(sizes)-1))
return [(sizes[0], sizes[u]), (sizes[u+1], sizes[-1]),], \
total_nfps, None
# Initialize subproblem total NFPs.
cost = np.zeros((len(sizes), num_part-2))
# Note: p is the number of partitions in the subproblem.
# p2i translates the number of partition into the index in the matrix.
p2i = lambda p : p - 2
# Compute p >= 2 until before p = num_part.
for p in range(2, num_part):
# Compute best partition for subproblems with increasing
# max index u, starting from the smallest possible u given the p.
# The smallest possible u can be considered as the max index that
# generates p partitions each with only one size.
for u in range(p-1, len(sizes)):
if p == 2:
cost[u, p2i(p)] = min(nfps[0, u1]+nfps[u1+1,u]
for u1 in range(u))
else:
cost[u, p2i(p)] = min(cost[u1, p2i(p-1)] + nfps[u1+1, u]
for u1 in range((p-1)-1, u))
p = num_part
# Find the optimal upper bound index of the 2nd right-most partition given
# the number of partitions (p).
total_nfps, u = min((cost[u1, p2i(p-1)]+nfps[u1+1, len(sizes)-1], u1)
for u1 in range((p-1)-1, len(sizes)-1))
partitions = [(sizes[u+1], sizes[-1]),]
p -= 1
# Back track to find the best partitions.
while p > 1:
# Find the optimal upper bound index of the 2nd right-most partition
# givne the number of partitions (p) and upper bound index (u) in this
# sub-problem.
_, u1_best = min((cost[u1, p2i(p)]+nfps[u1+1, u], u1)
for u1 in range((p-1)-1, u))
partitions.insert(0, (sizes[u1_best+1], sizes[u]))
u = u1_best
p -= 1
partitions.insert(0, (sizes[0], sizes[u]))
return [partitions, total_nfps, cost] | python | {
"resource": ""
} |
def optimal_partitions(sizes, counts, num_part):
    """Compute the optimal partitions given a distribution of set sizes.

    Args:
        sizes (numpy.array): Complete domain of set sizes, ascending.
        counts (numpy.array): Frequencies of all set sizes, in the same
            order as `sizes`.
        num_part (int): Number of partitions to create.

    Returns:
        list: Partitions as `(lower, upper)` tuples of inclusive set-size
        bounds.
    """
    # Degenerate cases: one partition spans everything; more partitions than
    # distinct sizes means one partition per size.
    if num_part < 2:
        return [(sizes[0], sizes[-1])]
    if num_part >= len(sizes):
        return [(size, size) for size in sizes]
    nfps = _compute_nfps_real(counts, sizes)
    best_partitions, _, _ = _compute_best_partitions(num_part, sizes, nfps)
    return best_partitions
q268737 | bBitMinHash._calc_c | test | def _calc_c(self, a1, a2, r1, r2):
'''
Compute the functions C1 and C2
'''
if r1 == 0.0 and r2 == 0.0:
# Find the limits of C1 and C2 as r1 -> 0 and r2 -> 0
# Since the b-value must be the same and r1 = r2,
# we have A1(r1, b1) = A2(r2, b2) = A,
# then the limits for both C1 and C2 are A.
return a1, a2
div = 1 / (r1 + r2)
c1 = (a1 * r2 + a2 * r1) * div
c2 = (a1 * r1 + a2 * r2) * div
return c1, c2 | python | {
"resource": ""
} |
q268738 | LeanMinHash._initialize_slots | test | def _initialize_slots(self, seed, hashvalues):
'''Initialize the slots of the LeanMinHash.
Args:
seed (int): The random seed controls the set of random
permutation functions generated for this LeanMinHash.
hashvalues: The hash values is the internal state of the LeanMinHash.
'''
self.seed = seed
self.hashvalues = self._parse_hashvalues(hashvalues) | python | {
"resource": ""
} |
def bytesize(self, byteorder='@'):
    '''Compute the byte size after serialization.

    Args:
        byteorder (str, optional): Byte order of the serialized data; one of
            the struct byte-order characters ``@``, ``=``, ``<``, ``>``,
            ``!``. Default ``@`` (native).

    Returns:
        int: Size in number of bytes after serialization.
    '''
    num_hashes = len(self)
    return (
        struct.calcsize(byteorder + 'q')                  # 8-byte seed
        + struct.calcsize(byteorder + 'i')                # 4-byte count
        + num_hashes * struct.calcsize(byteorder + 'I')   # 4 bytes per value
    )
q268740 | LeanMinHash.serialize | test | def serialize(self, buf, byteorder='@'):
'''
Serialize this lean MinHash and store the result in an allocated buffer.
Args:
buf (buffer): `buf` must implement the `buffer`_ interface.
One such example is the built-in `bytearray`_ class.
byteorder (str, optional): This is byte order of the serialized data. Use one
of the `byte order characters
<https://docs.python.org/3/library/struct.html#byte-order-size-and-alignment>`_:
``@``, ``=``, ``<``, ``>``, and ``!``.
Default is ``@`` -- the native order.
This is preferred over using `pickle`_ if the serialized lean MinHash needs
to be used by another program in a different programming language.
The serialization schema:
1. The first 8 bytes is the seed integer
2. The next 4 bytes is the number of hash values
3. The rest is the serialized hash values, each uses 4 bytes
Example:
To serialize a single lean MinHash into a `bytearray`_ buffer.
.. code-block:: python
buf = bytearray(lean_minhash.bytesize())
lean_minhash.serialize(buf)
To serialize multiple lean MinHash into a `bytearray`_ buffer.
.. code-block:: python
# assuming lean_minhashs is a list of LeanMinHash with the same size
size = lean_minhashs[0].bytesize()
buf = bytearray(size*len(lean_minhashs))
for i, lean_minhash in enumerate(lean_minhashs):
lean_minhash.serialize(buf[i*size:])
.. _`buffer`: https://docs.python.org/3/c-api/buffer.html
.. _`bytearray`: https://docs.python.org/3.6/library/functions.html#bytearray
.. _`byteorder`: https://docs.python.org/3/library/struct.html
'''
if len(buf) < self.bytesize():
raise ValueError("The buffer does not have enough space\
for holding this MinHash.")
fmt = "%sqi%dI" % (byteorder, len(self))
struct.pack_into(fmt, buf, 0,
self.seed, len(self), *self.hashvalues) | python | {
"resource": ""
} |
def deserialize(cls, buf, byteorder='@'):
    '''
    Deserialize a lean MinHash from a buffer.

    Expects the schema written by :meth:`serialize`: an 8-byte seed, a
    4-byte count, then 4 bytes per hash value.

    Args:
        buf (buffer): Readable buffer implementing the buffer interface.
        byteorder (str. optional): Byte order of the serialized data; one of
            the struct byte-order characters ``@``, ``=``, ``<``, ``>``,
            ``!``. Default ``@`` (native).

    Return:
        datasketch.LeanMinHash: The deserialized lean MinHash.
    '''
    header_fmt = "%sqi" % byteorder
    values_fmt = byteorder + "%dI"
    try:
        seed, num_perm = struct.unpack_from(header_fmt, buf, 0)
    except TypeError:
        # Python 2 fallback: some buffer types need wrapping in buffer().
        seed, num_perm = struct.unpack_from(header_fmt, buffer(buf), 0)
    offset = struct.calcsize(header_fmt)
    try:
        hashvalues = struct.unpack_from(values_fmt % num_perm, buf, offset)
    except TypeError:
        hashvalues = struct.unpack_from(values_fmt % num_perm, buffer(buf), offset)
    # Bypass __init__: the slots are filled directly from the parsed fields.
    lmh = object.__new__(LeanMinHash)
    lmh._initialize_slots(seed, hashvalues)
    return lmh
def update(self, b):
    '''Update this MinHash with a new value.

    The value is hashed using the hash function given by the `hashfunc`
    constructor argument, run through all permutations, and the
    per-permutation minima are kept.

    Args:
        b: The value to be hashed using the hash function specified.

    Example:
        .. code-block:: python

            minhash = MinHash()
            minhash.update("new value".encode('utf-8'))
    '''
    hashed = self.hashfunc(b)
    coeff_a, coeff_b = self.permutations
    # Universal hashing: (a*h + b) mod prime, truncated to the lower 32 bits.
    permuted = np.bitwise_and(
        (coeff_a * hashed + coeff_b) % _mersenne_prime, np.uint64(_max_hash)
    )
    self.hashvalues = np.minimum(permuted, self.hashvalues)
def merge(self, other):
    '''Merge the other MinHash with this one, making this one the union
    of both.

    Args:
        other (datasketch.MinHash): The other MinHash.

    Raises:
        ValueError: If the two MinHash do not share the same seed or the
            same number of permutation functions.
    '''
    # Sketches built with different seeds or lengths use different
    # permutations, so their values are not comparable.
    if other.seed != self.seed:
        raise ValueError("Cannot merge MinHash with\
                different seeds")
    if len(self) != len(other):
        raise ValueError("Cannot merge MinHash with\
                different numbers of permutation functions")
    # The MinHash of a union is the element-wise minimum of the two sketches.
    self.hashvalues = np.minimum(other.hashvalues, self.hashvalues)
def union(cls, *mhs):
    '''Create a MinHash which is the union of the MinHash objects passed as arguments.

    Args:
        *mhs: The MinHash objects to be united. The argument list length is variable,
            but must be at least 2.

    Returns:
        datasketch.MinHash: A new union MinHash.

    Raises:
        ValueError: If fewer than 2 MinHash are given, or if they do not all
            share the same seed and number of permutation functions.
    '''
    if len(mhs) < 2:
        raise ValueError("Cannot union less than 2 MinHash")
    num_perm = len(mhs[0])
    seed = mhs[0].seed
    # All sketches must be mutually compatible (same seed and length).
    if any((seed != m.seed or num_perm != len(m)) for m in mhs):
        raise ValueError("The unioning MinHash must have the\
                same seed and number of permutation functions")
    # Element-wise minimum across all sketches yields the union sketch.
    hashvalues = np.minimum.reduce([m.hashvalues for m in mhs])
    # Permutations are identical across compatible sketches; reuse the first.
    permutations = mhs[0].permutations
    return cls(num_perm=num_perm, seed=seed, hashvalues=hashvalues,
            permutations=permutations)
def index(self, entries):
    '''
    Index all sets given their keys, MinHashes, and sizes.
    It can be called only once after the index is created.

    Args:
        entries (`iterable` of `tuple`): An iterable of tuples, each must be
            in the form of `(key, minhash, size)`, where `key` is the unique
            identifier of a set, `minhash` is the MinHash of the set,
            and `size` is the size or number of unique items in the set.

    Note:
        `size` must be positive.

    Raises:
        ValueError: If the index is non-empty, a size is not positive, or
            `entries` is empty.
    '''
    if not self.is_empty():
        raise ValueError("Cannot call index again on a non-empty index")
    if not isinstance(entries, list):
        # Materialize the iterable (validating sizes as we go) so it can be
        # traversed multiple times below.
        queue = deque([])
        for key, minhash, size in entries:
            if size <= 0:
                raise ValueError("Set size must be positive")
            queue.append((key, minhash, size))
        entries = list(queue)
    if len(entries) == 0:
        raise ValueError("entries is empty")
    # Create optimal partitions.
    # Histogram of set sizes, sorted ascending by size, split into two
    # parallel arrays for the partition optimizer.
    sizes, counts = np.array(sorted(
        Counter(e[2] for e in entries).most_common())).T
    partitions = optimal_partitions(sizes, counts, len(self.indexes))
    for i, (lower, upper) in enumerate(partitions):
        self.lowers[i], self.uppers[i] = lower, upper
    # Insert into partitions.
    # Entries are sorted by size so a single forward sweep assigns each to
    # its (size-bounded) partition.
    entries.sort(key=lambda e : e[2])
    curr_part = 0
    for key, minhash, size in entries:
        if size > self.uppers[curr_part]:
            curr_part += 1
        # Insert into every LSH (one per r value) of the chosen partition.
        for r in self.indexes[curr_part]:
            self.indexes[curr_part][r].insert(key, minhash)
def query(self, minhash, size):
    '''
    Giving the MinHash and size of the query set, retrieve
    keys that references sets with containment with respect to
    the query set greater than the threshold.

    Args:
        minhash (datasketch.MinHash): The MinHash of the query set.
        size (int): The size (number of unique items) of the query set.

    Returns:
        `iterator` of keys.
    '''
    for i, index in enumerate(self.indexes):
        u = self.uppers[i]
        # Skip partitions that were never assigned a size range.
        if u is None:
            continue
        # Choose the (b, r) parameters tuned for this partition's upper
        # bound and the query size, then probe only b bands.
        b, r = self._get_optimal_param(u, size)
        for key in index[r]._query_b(minhash, b):
            yield key
def minhash(self, v):
    '''Create a new weighted MinHash given a weighted Jaccard vector.

    Each dimension is an integer frequency of the corresponding element in
    the multi-set represented by the vector.

    Args:
        v (numpy.array): The Jaccard vector.

    Returns:
        WeightedMinHash: The sketch of `v`.

    Raises:
        TypeError: If `v` is not iterable.
        ValueError: If `v` has the wrong dimension or is all zeros.
    '''
    # `collections.Iterable` was removed in Python 3.10; the ABC lives in
    # `collections.abc` (available since Python 3.3).
    import collections.abc
    if not isinstance(v, collections.abc.Iterable):
        raise TypeError("Input vector must be an iterable")
    if not len(v) == self.dim:
        raise ValueError("Input dimension mismatch, expecting %d" % self.dim)
    if not isinstance(v, np.ndarray):
        v = np.array(v, dtype=np.float32)
    elif v.dtype != np.float32:
        v = v.astype(np.float32)
    # `np.int` was removed in NumPy 1.24; the builtin `int` yields the same
    # platform-default integer dtype.
    hashvalues = np.zeros((self.sample_size, 2), dtype=int)
    vzeros = (v == 0)
    if vzeros.all():
        raise ValueError("Input is all zeros")
    # Mark zero weights as NaN so nanargmin below never selects them.
    v[vzeros] = np.nan
    vlog = np.log(v)
    for i in range(self.sample_size):
        # For each sample, pick the dimension k minimizing ln(a) and record
        # (k, t_k); presumably consistent weighted sampling (confirm against
        # the generator's rs/betas/ln_cs construction).
        t = np.floor((vlog / self.rs[i]) + self.betas[i])
        ln_y = (t - self.betas[i]) * self.rs[i]
        ln_a = self.ln_cs[i] - ln_y - self.rs[i]
        k = np.nanargmin(ln_a)
        hashvalues[i][0], hashvalues[i][1] = k, int(t[k])
    return WeightedMinHash(self.seed, hashvalues)
def remove(self, key):
    '''
    Remove the key from the index.

    Args:
        key (hashable): The unique identifier of a set.

    Raises:
        ValueError: If the key does not exist in the index.
    '''
    # Keys are stored pickled when the backend requires byte keys.
    if self.prepickle:
        key = pickle.dumps(key)
    if key not in self.keys:
        raise ValueError("The given key does not exist")
    # Remove the key from every band's hashtable; drop a bucket entirely
    # once it becomes empty.
    for H, hashtable in zip(self.keys[key], self.hashtables):
        hashtable.remove_val(H, key)
        if not hashtable.get(H):
            hashtable.remove(H)
    self.keys.remove(key)
def update(self, b):
    '''
    Update the HyperLogLog with a new data value in bytes.
    The value will be hashed using the hash function specified by
    the `hashfunc` argument in the constructor.

    Args:
        b: The value to be hashed using the hash function specified.

    Example:
        To update with a new string value (using the default SHA1 hash
        function, which requires bytes as input):

        .. code-block:: python

            hll = HyperLogLog()
            hll.update("new value".encode('utf-8'))

        We can also use a different hash function, for example, `pyfarmhash`:

        .. code-block:: python

            import farmhash
            def _hash_32(b):
                return farmhash.hash32(b)
            hll = HyperLogLog(hashfunc=_hash_32)
            hll.update("new value")
    '''
    # Digest the hash object to get the hash value
    hv = self.hashfunc(b)
    # Get the index of the register using the first p bits of the hash
    # (self.m == 2**p, so m-1 is a p-bit mask).
    reg_index = hv & (self.m - 1)
    # Get the rest of the hash
    bits = hv >> self.p
    # Update the register with the longest run-of-zeros rank observed.
    self.reg[reg_index] = max(self.reg[reg_index], self._get_rank(bits))
def count(self):
    '''
    Estimate the cardinality of the data values seen so far.

    Returns:
        int: The estimated cardinality.
    '''
    # Use HyperLogLog estimation function: harmonic mean of 2^reg, scaled
    # by alpha * m^2.
    e = self.alpha * float(self.m ** 2) / np.sum(2.0**(-self.reg))
    # Small range correction: fall back to linear counting while empty
    # registers remain informative.
    if e <= (5.0 / 2.0) * self.m:
        num_zero = self.m - np.count_nonzero(self.reg)
        return self._linearcounting(num_zero)
    # Normal range, no correction
    if e <= (1.0 / 30.0) * (1 << 32):
        return e
    # Large range correction (32-bit hash-collision regime)
    return self._largerange_correction(e)
def merge(self, other):
    '''
    Merge the other HyperLogLog with this one, making this the union of the
    two.

    Args:
        other (datasketch.HyperLogLog):

    Raises:
        ValueError: If the two sketches use different precisions.
    '''
    if self.m != other.m or self.p != other.p:
        raise ValueError("Cannot merge HyperLogLog with different\
                precisions.")
    # The union's registers are the element-wise maxima of both sketches.
    self.reg = np.maximum(self.reg, other.reg)
def clear(self):
    '''
    Reset this HyperLogLog to the empty state by zeroing every register.
    '''
    self.reg = np.zeros(self.m, dtype=np.int8)
def apk(actual, predicted, k=10):
    """
    Computes the average precision at k.

    This function computes the average precision at k between two lists of
    items.

    Parameters
    ----------
    actual : list
        A list of elements that are to be predicted (order doesn't matter)
    predicted : list
        A list of predicted elements (order does matter)
    k : int, optional
        The maximum number of predicted elements

    Returns
    -------
    score : double
        The average precision at k over the input lists
    """
    # Hoisted: an empty ground-truth list always scores 0, no need to loop.
    if len(actual) == 0:
        return 0.0
    if len(predicted) > k:
        predicted = predicted[:k]
    # Sets make the per-item membership tests O(1); the original scanned
    # `actual` and `predicted[:i]` on every iteration (O(k^2) overall).
    # Items must be hashable (they are identifiers in this metric).
    actual_set = set(actual)
    seen = set()
    score = 0.0
    num_hits = 0.0
    for i, p in enumerate(predicted):
        # Count a hit only for the first occurrence of a correct prediction.
        if p in actual_set and p not in seen:
            num_hits += 1.0
            score += num_hits / (i + 1.0)
        seen.add(p)
    return score / min(len(actual), k)
def mapk(actual, predicted, k=10):
    """
    Computes the mean average precision at k.

    Averages `apk` over the paired lists of ground-truth and predicted items.

    Parameters
    ----------
    actual : list
        A list of lists of elements that are to be predicted
        (order doesn't matter in the lists)
    predicted : list
        A list of lists of predicted elements
        (order matters in the lists)
    k : int, optional
        The maximum number of predicted elements

    Returns
    -------
    score : double
        The mean average precision at k over the input lists
    """
    scores = [apk(a, p, k) for a, p in zip(actual, predicted)]
    return np.mean(scores)
def index(self):
    '''
    Make every key added so far searchable by rebuilding the sorted
    key list for each hashtable.
    '''
    for i, hashtable in enumerate(self.hashtables):
        self.sorted_hashtables[i] = sorted(hashtable.keys())
def query(self, minhash, k):
    '''
    Return the approximate top-k keys that have the highest
    Jaccard similarities to the query set.

    Args:
        minhash (datasketch.MinHash): The MinHash of the query set.
        k (int): The maximum number of keys to return.

    Returns:
        `list` of at most k keys.

    Raises:
        ValueError: If `k` is not positive or the MinHash is too short
            for this forest's parameters.
    '''
    if k <= 0:
        raise ValueError("k must be positive")
    if len(minhash) < self.k*self.l:
        raise ValueError("The num_perm of MinHash out of range")
    results = set()
    # Start with the longest prefix (most selective) and shorten it until
    # enough candidates are collected.
    r = self.k
    while r > 0:
        for key in self._query(minhash, r, self.l):
            results.add(key)
            if len(results) >= k:
                return list(results)
        r -= 1
    return list(results)
async def close(self):
    """
    Cleanup client resources and disconnect from AsyncMinHashLSH storage.
    """
    # Serialize shutdown so concurrent closers don't double-free resources.
    async with self._lock:
        for t in self.hashtables:
            await t.close()
        if self.keys is not None:
            await self.keys.close()
        # Mark uninitialized so the index can be lazily re-initialized.
        self._initialized = False
def ordered_storage(config, name=None):
    '''Create an ordered storage backend described by `config`.

    An ordered storage behaves like ``defaultdict(list)``: it maps keys to
    ordered lists of values, with the most recently added item last.

    Args:
        config (dict): Storage configuration. ``{'type': 'dict'}`` selects
            in-memory storage. For Redis, set ``'type'`` to ``'redis'`` and
            supply parameters suitable for `redis.Redis` under the
            ``'redis'`` key; literal values may instead be dicts of the form
            ``{'env': 'REDIS_HOSTNAME', 'default': 'localhost'}`` that
            reference environment variables. For a full example, see
            :ref:`minhash_lsh_at_scale`.
        name (bytes, optional): A reference name for this storage container.
            Ignored for dict storage; used as a key prefix for Redis storage.
    '''
    storage_type = config['type']
    if storage_type == 'dict':
        return DictListStorage(config)
    if storage_type == 'redis':
        return RedisListStorage(config, name=name)
def unordered_storage(config, name=None):
    '''Create an unordered storage backend described by `config`.

    An unordered storage behaves like ``defaultdict(set)``: it maps keys to
    unordered sets of values.

    Args:
        config (dict): Storage configuration. ``{'type': 'dict'}`` selects
            in-memory storage. For Redis, set ``'type'`` to ``'redis'`` and
            supply parameters suitable for `redis.Redis` under the
            ``'redis'`` key; literal values may instead be dicts of the form
            ``{'env': 'REDIS_HOSTNAME', 'default': 'localhost'}`` that
            reference environment variables. For a full example, see
            :ref:`minhash_lsh_at_scale`.
        name (bytes, optional): A reference name for this storage container.
            Ignored for dict storage; used as a key prefix for Redis storage.
    '''
    storage_type = config['type']
    if storage_type == 'dict':
        return DictSetStorage(config)
    if storage_type == 'redis':
        return RedisSetStorage(config, name=name)
def get_user(self, obj):
    """
    Required to allow using custom USER_DETAILS_SERIALIZER in
    JWTSerializer. Defining it here to avoid circular imports
    """
    # Resolve the serializer class lazily from Django settings, falling back
    # to the default UserDetailsSerializer when none is configured.
    rest_auth_serializers = getattr(settings, 'REST_AUTH_SERIALIZERS', {})
    JWTUserDetailsSerializer = import_callable(
        rest_auth_serializers.get('USER_DETAILS_SERIALIZER', UserDetailsSerializer)
    )
    # Propagate this serializer's context (e.g. the request) to the nested one.
    user_data = JWTUserDetailsSerializer(obj['user'], context=self.context).data
    return user_data
def get_social_login(self, *args, **kwargs):
    """
    Set the social login process state to connect rather than login
    Refer to the implementation of get_social_login in base class and to the
    allauth.socialaccount.helpers module complete_social_login function.
    """
    social_login = super(SocialConnectMixin, self).get_social_login(*args, **kwargs)
    # CONNECT links the social account to the signed-in user instead of
    # starting a fresh login.
    social_login.state['process'] = AuthProcess.CONNECT
    return social_login
def select_text(text, reading=False, prefer=None):
    """Select the correct text from the Japanese number, reading and
    alternatives."""
    # Index 0 holds the kanji form, index 1 the kana reading.
    chosen = text[1] if reading else text[0]
    # A non-string entry is a collection of alternative spellings: use the
    # preferred one if exactly one matches, otherwise the first alternative.
    if not isinstance(chosen, strtype):
        matches = set(chosen) & set(prefer or ())
        chosen = matches.pop() if len(matches) == 1 else chosen[0]
    return chosen
def parse_scoped_selector(scoped_selector):
    """Split a scoped selector into its (scope, selector) parts.

    A leading ``%`` marks a macro reference (``%scope/name``), which is first
    rewritten to the equivalent ``scope/name/macro.value`` selector.

    Raises:
        ValueError: If a macro reference also ends with ``.value``.
    """
    if scoped_selector[0] == '%':
        if scoped_selector.endswith('.value'):
            err_str = '{} is invalid cannot use % and end with .value'
            raise ValueError(err_str.format(scoped_selector))
        scoped_selector = scoped_selector[1:] + '/macro.value'
    # Everything before the final '/' is the scope (empty when unscoped).
    scope, _, selector = scoped_selector.rpartition('/')
    return scope, selector
def parse_statement(self):
    """Parse a single statement.

    Returns:
        Either a `BindingStatement`, `ImportStatement`, `IncludeStatement`, or
        `None` if no more statements can be parsed (EOF reached).
    """
    self._skip_whitespace_and_comments()
    if self._current_token.kind == tokenize.ENDMARKER:
        return None
    # Save off location, but ignore char_num for any statement-level errors.
    stmt_loc = self._current_location(ignore_char_num=True)
    # The first selector is either a binding key or the keyword
    # 'import'/'include'; disambiguate by whether '=' follows.
    binding_key_or_keyword = self._parse_selector()
    statement = None
    if self._current_token.value != '=':
        if binding_key_or_keyword == 'import':
            module = self._parse_selector(scoped=False)
            statement = ImportStatement(module, stmt_loc)
        elif binding_key_or_keyword == 'include':
            str_loc = self._current_location()
            success, filename = self._maybe_parse_basic_type()
            if not success or not isinstance(filename, str):
                self._raise_syntax_error('Expected file path as string.', str_loc)
            statement = IncludeStatement(filename, stmt_loc)
        else:
            self._raise_syntax_error("Expected '='.")
    else:  # We saw an '='.
        self._advance_one_token()
        value = self.parse_value()
        scope, selector, arg_name = parse_binding_key(binding_key_or_keyword)
        statement = BindingStatement(scope, selector, arg_name, value, stmt_loc)
    assert statement, 'Internal parsing error.'
    # Each statement must be terminated by a newline (or EOF).
    if (self._current_token.kind != tokenize.NEWLINE and
            self._current_token.kind != tokenize.ENDMARKER):
        self._raise_syntax_error('Expected newline.')
    elif self._current_token.kind == tokenize.NEWLINE:
        self._advance_one_token()
    return statement
def parse_value(self):
    """Parse and return a single literal value.

    Each candidate parser is tried in order (container, basic type,
    configurable reference, macro); the first successful parse wins.

    Returns:
        The parsed value.
    """
    for try_parse in (self._maybe_parse_container,
                      self._maybe_parse_basic_type,
                      self._maybe_parse_configurable_reference,
                      self._maybe_parse_macro):
        success, value = try_parse()
        if success:
            return value
    self._raise_syntax_error('Unable to parse value.')
def advance_one_line(self):
    """Consume tokens until the current token lies on a new line."""
    starting_line = self._current_token.line_number
    while self._current_token.line_number == starting_line:
        self._current_token = ConfigParser.Token(*next(self._token_generator))
def _maybe_parse_configurable_reference(self):
    """Try to parse a configurable reference (@[scope/name/]fn_name[()]).

    Returns:
        A `(success, reference)` tuple; `reference` is None when the current
        token does not start a configurable reference.
    """
    if self._current_token.value != '@':
        return False, None
    location = self._current_location()
    self._advance_one_token()
    scoped_name = self._parse_selector(allow_periods_in_scope=True)
    # A trailing '()' means the reference should be evaluated (called) when
    # the config is applied, rather than passed as the configurable itself.
    evaluate = False
    if self._current_token.value == '(':
        evaluate = True
        self._advance()
        if self._current_token.value != ')':
            self._raise_syntax_error("Expected ')'.")
        self._advance_one_token()
    self._skip_whitespace_and_comments()
    # Attach the source location to any error raised while building the
    # reference, for better diagnostics.
    with utils.try_with_location(location):
        reference = self._delegate.configurable_reference(scoped_name, evaluate)
    return True, reference
def augment_exception_message_and_reraise(exception, message):
    """Reraises `exception`, appending `message` to its string representation."""
    class ExceptionProxy(type(exception)):
        """Acts as a proxy for an exception with an augmented message."""
        __module__ = type(exception).__module__

        def __init__(self):
            # Deliberately skip the base-class __init__: all state is
            # delegated to the wrapped exception via __getattr__.
            pass

        def __getattr__(self, attr_name):
            return getattr(exception, attr_name)

        def __str__(self):
            # The only behavioral difference: append the extra message.
            return str(exception) + message

    # Make the proxy indistinguishable from the original exception type
    # in tracebacks and repr.
    ExceptionProxy.__name__ = type(exception).__name__

    proxy = ExceptionProxy()
    if six.PY3:
        ExceptionProxy.__qualname__ = type(exception).__qualname__
        # raise_from(..., None) suppresses the implicit exception-chaining
        # context while preserving the original traceback.
        six.raise_from(proxy.with_traceback(exception.__traceback__), None)
    else:
        six.reraise(proxy, None, sys.exc_info()[2])
def _markdownify_operative_config_str(self, string):
    """Convert an operative config string to markdown format."""

    # TODO: Total hack below. Implement more principled formatting.
    def process(line):
        """Convert a single line to markdown format."""
        # Non-comment lines are bindings: indent them as a code block.
        if not line.startswith('#'):
            return '    ' + line
        line = line[2:]
        # Separator rules become blank lines.
        if line.startswith('===='):
            return ''
        if line.startswith('None'):
            return '    # None.'
        # Section headers ("Parameters for X:") become H4 headings.
        if line.endswith(':'):
            return '#### ' + line
        return line

    output_lines = []
    for line in string.splitlines():
        procd_line = process(line)
        # NOTE(review): process() never returns None, so this check appears
        # to always pass; kept for safety.
        if procd_line is not None:
            output_lines.append(procd_line)
    return '\n'.join(output_lines)
def after_create_session(self, session=None, coord=None):
    """Writes out Gin's operative config, and maybe adds a summary of it.

    Args:
        session: The TF session (used to read the global step, if any).
        coord: Unused; part of the SessionRunHook interface.
    """
    config_str = config.operative_config_str()
    if not tf.gfile.IsDirectory(self._output_dir):
        tf.gfile.MakeDirs(self._output_dir)
    # Use the current global step (0 if unavailable) to version the file.
    global_step_val = 0
    if session is not None:
        global_step = tf.train.get_global_step()
        if global_step is not None:
            global_step_val = session.run(global_step)
    filename = '%s-%s.gin' % (self._base_name, global_step_val)
    config_path = os.path.join(self._output_dir, filename)
    with tf.gfile.GFile(config_path, 'w') as f:
        f.write(config_str)
    if self._summarize_config:
        # Emit the config as a TensorBoard text summary.
        md_config_str = self._markdownify_operative_config_str(config_str)
        summary_metadata = summary_pb2.SummaryMetadata()
        summary_metadata.plugin_data.plugin_name = 'text'
        summary_metadata.plugin_data.content = b'{}'
        text_tensor = tf.make_tensor_proto(md_config_str)
        summary = summary_pb2.Summary()
        summary.value.add(
            tag='gin/' + self._base_name,
            tensor=text_tensor,
            metadata=summary_metadata)
        if not self._summary_writer:
            # Creating the FileWriter also creates the events file, so it should be
            # done here (where it is most likely to only occur on chief workers), as
            # opposed to in the constructor.
            self._summary_writer = tf.summary.FileWriterCache.get(self._output_dir)
        self._summary_writer.add_summary(summary, global_step_val)
        self._summary_writer.flush()
q268771 | _ensure_wrappability | test | def _ensure_wrappability(fn):
"""Make sure `fn` can be wrapped cleanly by functools.wraps."""
# Handle "wrapped_descriptor" and "method-wrapper" types.
if isinstance(fn, (type(object.__init__), type(object.__call__))):
# pylint: disable=unnecessary-lambda
wrappable_fn = lambda *args, **kwargs: fn(*args, **kwargs)
wrappable_fn.__name__ = fn.__name__
wrappable_fn.__doc__ = fn.__doc__
wrappable_fn.__module__ = '' # These types have no __module__, sigh.
wrappable_fn.__wrapped__ = fn
return wrappable_fn
# Otherwise we're good to go...
return fn | python | {
"resource": ""
} |
def _decorate_fn_or_cls(decorator, fn_or_cls, subclass=False):
    """Decorate a function or class with the given decorator.

    When `fn_or_cls` is a function, applies `decorator` to the function and
    returns the (decorated) result.

    When `fn_or_cls` is a class and the `subclass` parameter is `False`, this will
    replace `fn_or_cls.__init__` with the result of applying `decorator` to it.

    When `fn_or_cls` is a class and `subclass` is `True`, this will subclass the
    class, but with `__init__` defined to be the result of applying `decorator` to
    `fn_or_cls.__init__`. The decorated class has metadata (docstring, name, and
    module information) copied over from `fn_or_cls`. The goal is to provide a
    decorated class the behaves as much like the original as possible, without
    modifying it (for example, inspection operations using `isinstance` or
    `issubclass` should behave the same way as on the original class).

    Args:
        decorator: The decorator to use.
        fn_or_cls: The function or class to decorate.
        subclass: Whether to decorate classes by subclassing. This argument is
            ignored if `fn_or_cls` is not a class.

    Returns:
        The decorated function or class.
    """
    if not inspect.isclass(fn_or_cls):
        return decorator(_ensure_wrappability(fn_or_cls))

    # The construction entry point may be __init__ or __new__.
    construction_fn = _find_class_construction_fn(fn_or_cls)

    if subclass:
        class DecoratedClass(fn_or_cls):
            __doc__ = fn_or_cls.__doc__
            __module__ = fn_or_cls.__module__
        DecoratedClass.__name__ = fn_or_cls.__name__
        if six.PY3:
            DecoratedClass.__qualname__ = fn_or_cls.__qualname__
        cls = DecoratedClass
    else:
        cls = fn_or_cls

    decorated_fn = decorator(_ensure_wrappability(construction_fn))
    if construction_fn.__name__ == '__new__':
        # __new__ is implicitly a staticmethod; re-wrap it so attribute
        # assignment preserves that.
        decorated_fn = staticmethod(decorated_fn)
    setattr(cls, construction_fn.__name__, decorated_fn)
    return cls
def _format_value(value):
    """Returns `value` in a format parseable by `parse_value`, or `None`.

    Simply put, this function ensures that when it returns a string value, the
    following will hold:

        parse_value(_format_value(value)) == value

    Args:
        value: The value to format.

    Returns:
        A string representation of `value` when `value` is literally
        representable, or `None`.
    """
    representation = repr(value)
    try:
        round_trips = parse_value(representation) == value
    except SyntaxError:
        round_trips = False
    return representation if round_trips else None
def clear_config(clear_constants=False):
    """Clears the global configuration.

    This clears any parameter values set by `bind_parameter` or `parse_config`, as
    well as the set of dynamically imported modules. It does not remove any
    configurable functions or classes from the registry of configurables.

    Args:
        clear_constants: Whether to clear constants created by `constant`. Defaults
            to False.
    """
    # Unlock first so the clearing itself isn't rejected.
    _set_config_is_locked(False)
    _CONFIG.clear()
    _SINGLETONS.clear()
    if clear_constants:
        _CONSTANTS.clear()
    else:
        saved_constants = _CONSTANTS.copy()
        _CONSTANTS.clear()  # Clear then redefine constants (re-adding bindings).
        for name, value in six.iteritems(saved_constants):
            constant(name, value)
    _IMPORTED_MODULES.clear()
    _OPERATIVE_CONFIG.clear()
def bind_parameter(binding_key, value):
    """Binds the parameter value specified by `binding_key` to `value`.

    The `binding_key` argument should either be a string of the form
    `maybe/scope/optional.module.names.configurable_name.parameter_name`, or a
    list or tuple of `(scope, selector, parameter_name)`, where `selector`
    corresponds to `optional.module.names.configurable_name`. Once this function
    has been called, subsequent calls (in the specified scope) to the specified
    configurable function will have `value` supplied to their `parameter_name`
    parameter.

    Example:

        @configurable('fully_connected_network')
        def network_fn(num_layers=5, units_per_layer=1024):
            ...

        def main(_):
            config.bind_parameter('fully_connected_network.num_layers', 3)
            network_fn()  # Called with num_layers == 3, not the default of 5.

    Args:
        binding_key: The parameter whose value should be set. This can either be a
            string, or a tuple of the form `(scope, selector, parameter)`.
        value: The desired value.

    Raises:
        RuntimeError: If the config is locked.
        ValueError: If no function can be found matching the configurable name
            specified by `binding_key`, or if the specified parameter name is
            blacklisted or not in the function's whitelist (if present).
    """
    if config_is_locked():
        raise RuntimeError('Attempted to modify locked Gin config.')

    # ParsedBindingKey validates the key and resolves it to a registered
    # configurable.
    pbk = ParsedBindingKey(binding_key)
    fn_dict = _CONFIG.setdefault(pbk.config_key, {})
    fn_dict[pbk.arg_name] = value
def query_parameter(binding_key):
    """Returns the currently bound value to the specified `binding_key`.

    The `binding_key` argument should look like
    'maybe/some/scope/maybe.modules.configurable_name.parameter_name'. Note that
    this will not include default parameters.

    Args:
        binding_key: The parameter whose value should be queried.

    Returns:
        The value bound to the configurable/parameter combination given in
        `binding_key`.

    Raises:
        ValueError: If no function can be found matching the configurable name
            specified by `binding_key`, or if the specified parameter name is
            blacklisted or not in the function's whitelist (if present), or if
            there is no value bound for the queried parameter or configurable.
    """
    pbk = ParsedBindingKey(binding_key)
    bound_args = _CONFIG.get(pbk.config_key)
    if bound_args is None:
        err_str = "Configurable '{}' has no bound parameters."
        raise ValueError(err_str.format(pbk.given_selector))
    if pbk.arg_name not in bound_args:
        err_str = "Configurable '{}' has no value bound for parameter '{}'."
        raise ValueError(err_str.format(pbk.given_selector, pbk.arg_name))
    return bound_args[pbk.arg_name]
def _might_have_parameter(fn_or_cls, arg_name):
    """Returns True if `arg_name` might be a valid parameter for `fn_or_cls`.

    Specifically, this means that `fn_or_cls` either has a parameter named
    `arg_name`, or has a `**kwargs` parameter.

    Args:
        fn_or_cls: The function or class to check.
        arg_name: The name fo the parameter.

    Returns:
        Whether `arg_name` might be a valid argument of `fn`.
    """
    if inspect.isclass(fn_or_cls):
        # For classes, inspect the construction function (__init__/__new__).
        fn = _find_class_construction_fn(fn_or_cls)
    else:
        fn = fn_or_cls

    # Unwrap decorator layers to inspect the innermost signature.
    while hasattr(fn, '__wrapped__'):
        fn = fn.__wrapped__
    arg_spec = _get_cached_arg_spec(fn)
    if six.PY3:
        if arg_spec.varkw:
            return True
        return arg_name in arg_spec.args or arg_name in arg_spec.kwonlyargs
    else:
        if arg_spec.keywords:
            return True
        return arg_name in arg_spec.args
def _get_cached_arg_spec(fn):
    """Gets cached argspec for `fn`."""
    arg_spec = _ARG_SPEC_CACHE.get(fn)
    if arg_spec is None:
        # getargspec was removed from Python 3; use the full variant there.
        arg_spec_fn = inspect.getfullargspec if six.PY3 else inspect.getargspec
        try:
            arg_spec = arg_spec_fn(fn)
        except TypeError:
            # `fn` might be a callable object.
            arg_spec = arg_spec_fn(fn.__call__)
        _ARG_SPEC_CACHE[fn] = arg_spec
    return arg_spec
def _get_supplied_positional_parameter_names(fn, args):
    """Returns the names of the positional parameters filled by `args`.

    The result may be shorter than `len(args)` when some of `args` are
    consumed by a vararg (*args) parameter.
    """
    return _get_cached_arg_spec(fn).args[:len(args)]
def _get_all_positional_parameter_names(fn):
    """Returns the names of `fn`'s parameters that have no default value."""
    arg_spec = _get_cached_arg_spec(fn)
    # Parameters with defaults occupy the tail of `args`; drop them.
    num_with_defaults = len(arg_spec.defaults or ())
    if num_with_defaults:
        return arg_spec.args[:-num_with_defaults]
    return arg_spec.args
def _get_default_configurable_parameter_values(fn, whitelist, blacklist):
    """Retrieve all default values for configurable parameters of a function.

    Any parameters included in the supplied blacklist, or not included in the
    supplied whitelist, are excluded.

    Args:
        fn: The function whose parameter values should be retrieved.
        whitelist: The whitelist (or `None`) associated with the function.
        blacklist: The blacklist (or `None`) associated with the function.

    Returns:
        A dictionary mapping configurable parameter names to their default values.
    """
    arg_vals = _ARG_DEFAULTS_CACHE.get(fn)
    if arg_vals is not None:
        # Return a copy so callers can't mutate the cached dict.
        return arg_vals.copy()

    # First, grab any default values not captured in the kwargs var.
    arg_spec = _get_cached_arg_spec(fn)
    if arg_spec.defaults:
        # Defaults align with the tail of the positional argument names.
        default_kwarg_names = arg_spec.args[-len(arg_spec.defaults):]
        arg_vals = dict(zip(default_kwarg_names, arg_spec.defaults))
    else:
        arg_vals = {}

    if six.PY3 and arg_spec.kwonlydefaults:
        arg_vals.update(arg_spec.kwonlydefaults)

    # Now, eliminate keywords that are blacklisted, or aren't whitelisted (if
    # there's a whitelist), or aren't representable as a literal value.
    for k in list(six.iterkeys(arg_vals)):
        whitelist_fail = whitelist and k not in whitelist
        blacklist_fail = blacklist and k in blacklist
        representable = _is_literally_representable(arg_vals[k])
        if whitelist_fail or blacklist_fail or not representable:
            del arg_vals[k]

    _ARG_DEFAULTS_CACHE[fn] = arg_vals
    return arg_vals.copy()
def config_scope(name_or_scope):
    """Opens a new configuration scope.

    Provides a context manager that opens a new explicit configuration
    scope. Explicit configuration scopes restrict parameter bindings to only
    certain sections of code that run within the scope. Scopes can be nested to
    arbitrary depth; any configurable functions called within a scope inherit
    parameters defined by higher level scopes.

    For example, suppose a function named `preprocess_images` is called in two
    places in a codebase: Once when loading data for a training task, and once
    when loading data for an evaluation task:

        def load_training_data():
            ...
            with gin.config_scope('train'):
                images = preprocess_images(images)
            ...

        def load_eval_data():
            ...
            with gin.config_scope('eval'):
                images = preprocess_images(images)
            ...

    By using a `config_scope` to wrap each invocation of `preprocess_images` as
    above, it is possible to use Gin to supply specific parameters to each. Here
    is a possible configuration for the above example:

        preprocess_images.crop_size = [64, 64]
        preprocess_images.normalize_image = True
        train/preprocess_images.crop_location = 'random'
        train/preprocess_images.random_flip_lr = True
        eval/preprocess_images.crop_location = 'center'

    The `crop_size` and `normalize_image` parameters above will be shared by both
    the `train` and `eval` invocations; only `train` will receive
    `random_flip_lr`, and the two invocations receive different values for
    `crop_location`.

    Passing `None` or `''` to `config_scope` will temporarily clear all currently
    active scopes (within the `with` block; they will be restored afterwards).

    Args:
        name_or_scope: A name for the config scope, or an existing scope (e.g.,
            captured from `with gin.config_scope(...) as scope`), or `None` to clear
            currently active scopes.

    Raises:
        ValueError: If `name_or_scope` is not a list, string, or None.

    Yields:
        The resulting config scope (a list of all active scope names, ordered from
        outermost to innermost).
    """
    try:
        valid_value = True
        if isinstance(name_or_scope, list):
            # An existing scope list: use it as-is.
            new_scope = name_or_scope
        elif name_or_scope and isinstance(name_or_scope, six.string_types):
            new_scope = current_scope()  # Returns a copy.
            new_scope.extend(name_or_scope.split('/'))
        else:
            valid_value = name_or_scope in (None, '')
            new_scope = []

        # Append new_scope first. It will be popped in the finally block if an
        # exception is raised below.
        _ACTIVE_SCOPES.append(new_scope)

        # Validate after pushing so the finally block always has something to pop.
        scopes_are_valid = map(config_parser.MODULE_RE.match, new_scope)
        if not valid_value or not all(scopes_are_valid):
            err_str = 'Invalid value for `name_or_scope`: {}.'
            raise ValueError(err_str.format(name_or_scope))

        yield new_scope
    finally:
        _ACTIVE_SCOPES.pop()
def configurable(name_or_fn=None, module=None, whitelist=None, blacklist=None):
    """Decorator registering a function or class as Gin-configurable.

    Once registered, the target's parameters may be supplied from the global
    configuration (via `bind_parameter` or `parse_config`). The configurable
    name defaults to the target's own name, but may be given explicitly,
    optionally including disambiguating module components (e.g. 'pkg.name').

    Works both bare (`@configurable`) and with arguments
    (`@configurable('explicit_name', whitelist=...)`).

    Args:
        name_or_fn: Either the function/class to decorate (bare usage) or an
            explicit configurable name; `None` means "use the target's name".
        module: The module to associate with the configurable, to help handle
            naming collisions; defaults to the target's own module.
        whitelist: If given, only these kwargs are configurable. Only one of
            `whitelist`/`blacklist` should be specified.
        blacklist: If given, these kwargs are not configurable.

    Returns:
        The decorated function/class when used bare, otherwise a decorator
        that performs the registration when applied.
    """
    if callable(name_or_fn):
        # Bare `@configurable` usage: the first argument is the target itself.
        target, name = name_or_fn, None
    else:
        target, name = None, name_or_fn

    def decorate(fn_or_cls):
        return _make_configurable(fn_or_cls, name, module, whitelist, blacklist)

    return decorate(target) if target else decorate
"resource": ""
} |
def operative_config_str(max_line_length=80, continuation_indent=4):
    """Render the "operative" configuration as a Gin config string.

    The operative configuration holds the parameter values actually used by
    configurable functions called so far (including defaults), so the result
    captures the full set of effective hyperparameters. Values that cannot be
    represented as Gin literals are omitted. Output lists imports first, then
    macros, then all other bindings grouped by configurable and sorted
    lexicographically (case-insensitively).

    Args:
        max_line_length: Soft cap on line length; long nested values are
            wrapped, but e.g. long strings are not split.
        continuation_indent: Indentation applied to wrapped value lines.

    Returns:
        A config string reflecting all parameter values used so far.
    """

    def _render_binding(key, value):
        # Pretty-print `key = value`, wrapping onto indented lines if needed.
        pretty = pprint.pformat(
            value, width=(max_line_length - continuation_indent))
        pretty_lines = pretty.split('\n')
        fits = len(pretty_lines) == 1 and len(key + pretty) <= max_line_length
        if fits:
            return '{} = {}'.format(key, pretty)
        indented = '\n'.join(
            ' ' * continuation_indent + line for line in pretty_lines)
        return '{} = \\\n{}'.format(key, indented)

    def _sort_key(key_tuple):
        # Sort by selector then innermost scope, ignoring case.
        scope, selector = key_tuple[0]
        parts = selector.lower().split('.')[::-1] + scope.lower().split('/')[::-1]
        return '/'.join(parts)

    # Imports come first, followed by macros, then the remaining bindings
    # grouped by configurable name.
    statements = [
        'import {}'.format(module) for module in sorted(_IMPORTED_MODULES)
    ]
    if statements:
        statements.append('')

    macros = {
        key: conf for key, conf in six.iteritems(_OPERATIVE_CONFIG)
        if _REGISTRY[key[1]].fn_or_cls == macro
    }
    if macros:
        statements.append('# Macros:')
        statements.append('# ' + '=' * (max_line_length - 2))
        for (name, _), conf in sorted(macros.items(), key=_sort_key):
            statements.append(_render_binding(name, conf['value']))
        statements.append('')

    for (scope, selector), conf in sorted(
            _OPERATIVE_CONFIG.items(), key=_sort_key):
        configurable_ = _REGISTRY[selector]
        fn = configurable_.fn_or_cls
        if fn == macro or fn == _retrieve_constant:
            continue  # Macros and constants were already handled above.
        minimal = _REGISTRY.minimal_selector(configurable_.selector)
        scoped_selector = (scope + '/' if scope else '') + minimal
        parameters = [(k, v) for k, v in six.iteritems(conf)
                      if _is_literally_representable(v)]
        statements.append('# Parameters for {}:'.format(scoped_selector))
        statements.append('# ' + '=' * (max_line_length - 2))
        for arg, val in sorted(parameters):
            statements.append(
                _render_binding('{}.{}'.format(scoped_selector, arg), val))
        if not parameters:
            statements.append('# None.')
        statements.append('')
    return '\n'.join(statements)
"resource": ""
} |
def parse_config(bindings, skip_unknown=False):
    """Parse parameter bindings, configuring the global Gin state.

    Accepts a file-like object (supporting `readline`), a newline-separated
    string, or a list of individual binding strings. Each binding has the form

        maybe/some/scopes/configurable_name.parameter_name = value

    where `value` may be any Python literal, a (possibly scoped) configurable
    reference (`@name`, or `@name()` for call-on-demand), or a macro. Import
    and include statements are executed as they are encountered.

    Args:
        bindings: The bindings to parse — a file-like object, a newline
            separated string, or a list of binding strings.
        skip_unknown: Whether unknown configurables and imports should be
            skipped instead of raising; may also be a list of configurable
            names for which skipping is allowed. Bindings for known
            configurables are always parsed.
    """
    if isinstance(bindings, (list, tuple)):
        bindings = '\n'.join(bindings)

    _validate_skip_unknown(skip_unknown)
    if isinstance(skip_unknown, (list, tuple)):
        skip_unknown = set(skip_unknown)

    parser = config_parser.ConfigParser(bindings, ParserDelegate(skip_unknown))
    for stmt in parser:
        if isinstance(stmt, config_parser.BindingStatement):
            scope, selector, arg_name, value, location = stmt
            if not arg_name:
                # A bare `name = value` statement defines a macro.
                macro_name = '{}/{}'.format(scope, selector) if scope else selector
                with utils.try_with_location(location):
                    bind_parameter((macro_name, 'gin.macro', 'value'), value)
            elif not _should_skip(selector, skip_unknown):
                with utils.try_with_location(location):
                    bind_parameter((scope, selector, arg_name), value)
        elif isinstance(stmt, config_parser.ImportStatement):
            if skip_unknown:
                try:
                    __import__(stmt.module)
                    _IMPORTED_MODULES.add(stmt.module)
                except ImportError:
                    log_str = 'Skipping import of unknown module `%s` (skip_unknown=%r).'
                    logging.info(log_str, stmt.module, skip_unknown)
            else:
                with utils.try_with_location(stmt.location):
                    __import__(stmt.module)
                    _IMPORTED_MODULES.add(stmt.module)
        elif isinstance(stmt, config_parser.IncludeStatement):
            with utils.try_with_location(stmt.location):
                parse_config_file(stmt.filename, skip_unknown)
        else:
            raise AssertionError('Unrecognized statement type {}.'.format(stmt))
"resource": ""
} |
def register_file_reader(*args):
    """Register a file reader for use in `parse_config_file`.

    Registered readers are tried in order (beginning with the default `open`)
    until one reports that it can read the file. May be called directly with
    both arguments, or used as a decorator supplying only the existence
    check:

        @register_file_reader(IOError)
        def exotic_data_source(filename):
            ...

    Args:
        *args: Either `(file_reader_fn, is_readable_fn)`, or — when used as a
            decorator — just `is_readable_fn`. `file_reader_fn` must be
            usable as a context manager yielding a file-like object (like
            `open`); `is_readable_fn` takes a path and returns whether
            `file_reader_fn` can read it.

    Returns:
        `None`, or (decorator form) a function performing the registration
        with the supplied readability predicate.

    Raises:
        TypeError: If called with zero or more than two arguments.
    """
    def do_registration(file_reader_fn, is_readable_fn):
        # Register each reader at most once. Scan the existing pairs lazily:
        # the previous `list(zip(*_FILE_READERS))[0]` lookup built throwaway
        # lists and raised IndexError whenever the registry was empty.
        already_present = any(
            reader == file_reader_fn for reader, _ in _FILE_READERS)
        if not already_present:
            _FILE_READERS.append((file_reader_fn, is_readable_fn))

    if len(args) == 1:  # Decorator usage: only the readability check given.
        return functools.partial(do_registration, is_readable_fn=args[0])
    elif len(args) == 2:
        do_registration(*args)
    else:  # 0 or > 2 arguments supplied.
        err_str = 'register_file_reader() takes 1 or 2 arguments ({} given)'
        raise TypeError(err_str.format(len(args)))
"resource": ""
} |
def parse_config_file(config_file, skip_unknown=False):
    """Parse a Gin config file, trying each registered file reader in turn.

    Args:
        config_file: The path to a Gin config file.
        skip_unknown: Whether unknown configurables and imports should be
            skipped instead of raising (or a list of configurable names to
            skip if unknown); see `parse_config`.

    Raises:
        IOError: If no registered file reader can open `config_file`.
    """
    for file_reader, is_readable in _FILE_READERS:
        if not is_readable(config_file):
            continue
        with file_reader(config_file) as f:
            parse_config(f, skip_unknown=skip_unknown)
        return
    raise IOError('Unable to open file: {}'.format(config_file))
"resource": ""
} |
def parse_config_files_and_bindings(config_files,
                                    bindings,
                                    finalize_config=True,
                                    skip_unknown=False):
    """Parse a list of config files, then extra bindings, then finalize.

    Equivalent to calling `parse_config_file` on each file in `config_files`,
    then `parse_config` on `bindings`, and finally `finalize()` (unless
    disabled via `finalize_config`).

    Args:
        config_files: A list of paths to Gin config files, or `None`.
        bindings: A list of individual parameter binding strings, or `None`.
        finalize_config: Whether to lock the config after parsing and binding
            (defaults to True).
        skip_unknown: Whether unknown configurables and imports should be
            skipped instead of raising (or a list of configurable names to
            skip if unknown); see `parse_config`.
    """
    files = [] if config_files is None else config_files
    extra_bindings = '' if bindings is None else bindings
    for path in files:
        parse_config_file(path, skip_unknown)
    parse_config(extra_bindings, skip_unknown)
    if finalize_config:
        finalize()
"resource": ""
} |
def parse_value(value):
    """Parse `value` (a string) as a single Gin literal and return it."""
    if isinstance(value, six.string_types):
        return config_parser.ConfigParser(value, ParserDelegate()).parse_value()
    raise ValueError('value ({}) should be a string type.'.format(value))
"resource": ""
} |
def finalize():
    """Run registered finalize hooks, then lock the global configuration.

    Each hook receives the parsed config and may return a dictionary mapping
    binding keys to new or updated values; hooks must not mutate the config
    directly, so every hook observes the configuration as originally parsed.

    Raises:
        RuntimeError: If the config is already locked (finalize ran twice).
        ValueError: If two or more hooks try to bind the same key, since hook
            registration order is hard to control and the result would be
            unpredictable.
    """
    if config_is_locked():
        raise RuntimeError('Finalize called twice (config already locked).')

    # Merge every hook's updates first so conflicts are detected before any
    # binding is applied.
    merged = {}
    for hook in _FINALIZE_HOOKS:
        updates = hook(_CONFIG)
        if updates is None:
            continue
        for key, value in six.iteritems(updates):
            parsed_key = ParsedBindingKey(key)
            if parsed_key in merged:
                err_str = 'Received conflicting updates when running {}.'
                raise ValueError(err_str.format(hook))
            merged[parsed_key] = value

    for parsed_key, value in six.iteritems(merged):
        bind_parameter(parsed_key, value)
    _set_config_is_locked(True)
"resource": ""
} |
q268791 | _iterate_flattened_values | test | def _iterate_flattened_values(value):
"""Provides an iterator over all values in a nested structure."""
if isinstance(value, six.string_types):
yield value
return
if isinstance(value, collections.Mapping):
value = collections.ValuesView(value)
if isinstance(value, collections.Iterable):
for nested_value in value:
for nested_nested_value in _iterate_flattened_values(nested_value):
yield nested_nested_value
yield value | python | {
"resource": ""
} |
def iterate_references(config, to=None):
    """Yield the `ConfigurableReference`s contained anywhere in `config`.

    Args:
        config: A dictionary mapping scoped configurable names to argument
            bindings (any nested structure of values is searched).
        to: If supplied, only yield references whose underlying function or
            class equals `to`.

    Yields:
        Matching `ConfigurableReference` instances found within `config`.
    """
    for candidate in _iterate_flattened_values(config):
        if not isinstance(candidate, ConfigurableReference):
            continue
        if to is None or candidate.configurable.fn_or_cls == to:
            yield candidate
"resource": ""
} |
def constant(name, value):
    """Create a constant referenceable from Gin config files via `%name`.

    Any Python object may be used as the value, including objects not
    representable as Gin literals. Values are stored until program
    termination in a Gin-internal dictionary, so avoid constants whose
    values should have a limited lifetime. The name may be prefixed with
    disambiguating module components (e.g. 'some.modules.PI').

    Args:
        name: Constant name, optionally prepended by dotted module
            components; a macro with this name is created.
        value: The value returned whenever the constant is referenced.

    Raises:
        ValueError: If the constant's selector is invalid, or a constant
            matching the given selector already exists.
    """
    if not config_parser.MODULE_RE.match(name):
        raise ValueError("Invalid constant selector '{}'.".format(name))
    # Query the selector map once; the original code performed the same
    # lookup twice (once for the check, once for the error message).
    existing = _CONSTANTS.matching_selectors(name)
    if existing:
        err_str = "Constants matching selector '{}' already exist ({})."
        raise ValueError(err_str.format(name, existing))
    _CONSTANTS[name] = value
"resource": ""
} |
def constants_from_enum(cls, module=None):
    """Class decorator generating a Gin constant for each enum member.

    Generated constants have the form `module.ClassName.ENUM_VALUE`; the
    module prefix is optional when referencing the constant.

    Args:
        cls: The enum class to process.
        module: Module name to associate with the constants, to help handle
            naming collisions; `cls.__module__` is used when `None`.

    Returns:
        `cls`, unchanged (identity function).

    Raises:
        TypeError: When applied to a non-enum class.
    """
    if not issubclass(cls, enum.Enum):
        raise TypeError("Class '{}' is not subclass of enum.".format(cls.__name__))

    prefix = cls.__module__ if module is None else module
    for member in cls:
        # str(member) renders as 'ClassName.MEMBER_NAME'.
        constant('{}.{}'.format(prefix, str(member)), member)
    return cls
"resource": ""
} |
def matching_selectors(self, partial_selector):
    """Return all complete selectors whose suffix is `partial_selector`.

    For instance, if "one.a.b" and "two.a.b" are stored, both
    `matching_selectors('b')` and `matching_selectors('a.b')` return the
    pair. An exact match on a complete selector short-circuits: with
    "a.b.c.d" and "c.d" stored, `matching_selectors('c.d')` returns only
    `['c.d']`, while `matching_selectors('d')` returns both.

    Args:
        partial_selector: The (possibly partial) selector to match.

    Returns:
        A list of complete selectors matching `partial_selector`.
    """
    # An exact complete-selector match wins outright.
    if partial_selector in self._selector_map:
        return [partial_selector]

    # Walk the suffix tree from the innermost component outward.
    node = self._selector_tree
    for component in reversed(partial_selector.split('.')):
        if component not in node:
            return []
        node = node[component]

    # Collect every terminal selector beneath this subtree (iterative DFS).
    matches = []
    pending = [node]
    while pending:
        subtree = pending.pop().copy()
        terminal = subtree.pop(_TERMINAL_KEY, None)
        pending.extend(subtree.values())
        if terminal:
            matches.append(terminal)
    return matches
"resource": ""
} |
def get_all_matches(self, partial_selector):
    """Return the stored values for every selector matching `partial_selector`."""
    return [
        self._selector_map[match]
        for match in self.matching_selectors(partial_selector)
    ]
"resource": ""
} |
def minimal_selector(self, complete_selector):
    """Return the shortest suffix uniquely identifying `complete_selector`.

    Args:
        complete_selector: A complete selector stored in the map.

    Returns:
        A partial selector that unambiguously matches `complete_selector`
        (the full selector itself when no shorter suffix is unambiguous,
        e.g. when it is a suffix of another stored selector).

    Raises:
        KeyError: If `complete_selector` is not in the map.
    """
    if complete_selector not in self._selector_map:
        raise KeyError("No value with selector '{}'.".format(complete_selector))

    components = complete_selector.split('.')
    node = self._selector_tree
    start = None
    for i, component in enumerate(reversed(components)):
        # Track where an unambiguous suffix begins: while each visited tree
        # level has a single child, the suffix seen so far stays unique.
        if len(node) != 1:
            start = None
        elif start is None:
            start = -i  # Negative index, since components are scanned in reverse.
        node = node[component]

    if len(node) > 1:
        # This selector is a strict suffix of another stored selector.
        return complete_selector
    return '.'.join(components[start:])
"resource": ""
} |
def sp_search_query(query):
    """Translate a Mopidy search query into a Spotify search string.

    Fields are mapped through SEARCH_FIELD_MAP; fields with no Spotify
    equivalent are dropped. Year values are normalized via _transform_year,
    'any' terms become bare quoted phrases, and everything else becomes
    `field:"value"` terms joined by spaces.
    """
    terms = []
    for raw_field, values in query.items():
        field = SEARCH_FIELD_MAP.get(raw_field, raw_field)
        if field is None:
            continue  # No Spotify equivalent for this field.
        for value in values:
            if field == 'year':
                year = _transform_year(value)
                if year is not None:
                    terms.append('%s:%d' % (field, year))
            elif field == 'any':
                terms.append('"%s"' % value)
            else:
                terms.append('%s:"%s"' % (field, value))
    return ' '.join(terms)
"resource": ""
} |
q268799 | OAuthClient._parse_retry_after | test | def _parse_retry_after(self, response):
"""Parse Retry-After header from response if it is set."""
value = response.headers.get('Retry-After')
if not value:
seconds = 0
elif re.match(r'^\s*[0-9]+\s*$', value):
seconds = int(value)
else:
date_tuple = email.utils.parsedate(value)
if date_tuple is None:
seconds = 0
else:
seconds = time.mktime(date_tuple) - time.time()
return max(0, seconds) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.