text_prompt stringlengths 100 17.7k ⌀ | code_prompt stringlengths 7 9.86k ⌀ |
|---|---|
<SYSTEM_TASK:>
Read the line content and return the converted value
<END_TASK>
<USER_TASK:>
Description:
def convert(self, line=None, is_end=True):
"""Read the line content and return the converted value
:param line: the line to feed to converter
:param is_end: if set to True, will raise an err... |
if line is not None:
self.line = line
if not self.line:
raise TomlDecodeError(self.parser.lineno,
'EOF is hit!')
token = None
self.line = self.line.lstrip()
for key, pattern in self.patterns:
m = pattern.match... |
<SYSTEM_TASK:>
Parse the lines from index i
<END_TASK>
<USER_TASK:>
Description:
def parse(self, data=None, table_name=None):
"""Parse the lines from index i
:param data: optional, store the parsed result to it when specified
:param table_name: when inside a table array, it is the table name
... |
temp = self.dict_()
sub_table = None
is_array = False
line = ''
while True:
line = self._readline()
if not line:
self._store_table(sub_table, temp, is_array, data=data)
break # EOF
if BLANK_RE.match(line):... |
<SYSTEM_TASK:>
Return a boolean if the page is visible in navigation.
<END_TASK>
<USER_TASK:>
Description:
def is_visible(self):
    """
    Return whether the page is visible in navigation.

    Glitter-app pages only need the show-in-navigation flag; ordinary
    pages must additionally be published.
    """
    if self.glitter_app_name:
        # Glitter-app pages are controlled by the navigation flag alone.
        return self.show_in_navigation
    return self.show_in_navigation and self.is_published
<SYSTEM_TASK:>
Add this method because django doesn't validate correctly because required fields are
<END_TASK>
<USER_TASK:>
Description:
def validate_unique(self):
    """
    Add this method because django doesn't validate correctly because required fields are
    excluded.

    Runs the model instance's unique checks with nothing excluded and
    attaches any resulting errors as non-field errors on the form.
    """
    checks, _date_checks = self.instance._get_unique_checks(exclude=[])
    problems = self.instance._perform_unique_checks(checks)
    if problems:
        self.add_error(None, problems)
<SYSTEM_TASK:>
Create Image from raw dictionary data.
<END_TASK>
<USER_TASK:>
Description:
def from_dict(raw_data):
"""Create Image from raw dictionary data.""" |
url = None
width = None
height = None
try:
url = raw_data['url']
width = raw_data['width']
height = raw_data['height']
except KeyError:
raise ValueError('Unexpected image json structure')
except TypeError:
# Hap... |
<SYSTEM_TASK:>
Return set of common words between two word sets.
<END_TASK>
<USER_TASK:>
Description:
def has_common(self, other):
    """Return set of common words between two word sets."""
    # NOTE(review): despite the has_* name, this returns the intersection
    # set itself, not a boolean (truthy when non-empty).
    if isinstance(other, WordSet):
        return self.term_set & other.term_set
    raise ValueError('Can compare only WordSets')
<SYSTEM_TASK:>
Create a new release in github
<END_TASK>
<USER_TASK:>
Description:
def release(ctx, yes, latest):
"""Create a new release in github
""" |
m = RepoManager(ctx.obj['agile'])
api = m.github_repo()
if latest:
latest = api.releases.latest()
if latest:
click.echo(latest['tag_name'])
elif m.can_release('sandbox'):
branch = m.info['branch']
version = m.validate_version()
name = 'v%s' % version
... |
<SYSTEM_TASK:>
Create XenaManager object.
<END_TASK>
<USER_TASK:>
Description:
def init_xena(api, logger, owner, ip=None, port=57911):
    """ Create XenaManager object.

    :param api: cli/rest (an ApiType member)
    :param logger: python logger
    :param owner: owner of the scripting session
    :param ip: rest server IP
    :param port: rest server TCP port
    :return: XenaApp wrapping the selected API transport
    :raises ValueError: if api is not a supported ApiType
    """
    if api == ApiType.socket:
        api_wrapper = XenaCliWrapper(logger)
    elif api == ApiType.rest:
        api_wrapper = XenaRestWrapper(logger, ip, port)
    else:
        # Bug fix: previously an unknown api fell through and raised
        # UnboundLocalError on api_wrapper; fail with a clear error instead.
        raise ValueError('Unsupported API type: {}'.format(api))
    return XenaApp(logger, owner, api_wrapper)
<SYSTEM_TASK:>
Add chassis.
<END_TASK>
<USER_TASK:>
Description:
def add_chassis(self, chassis, port=22611, password='xena'):
    """ Add chassis.

    XenaManager-2G -> Add Chassis.

    :param chassis: chassis IP address
    :param port: chassis port number
    :param password: chassis password
    :return: the chassis object looked up in self.chassis_list
    """
    if chassis not in self.chassis_list:
        try:
            # NOTE(review): XenaChassis appears to register itself into
            # self.objects / chassis_list as a constructor side effect —
            # confirm against the XenaChassis implementation.
            XenaChassis(self, chassis, port, password)
        except Exception as error:
            # Undo the partial registration left behind by the failed
            # constructor before re-raising.
            self.objects.pop('{}/{}'.format(self.owner, chassis))
            raise error
    return self.chassis_list[chassis]
<SYSTEM_TASK:>
Stop traffic on list of ports.
<END_TASK>
<USER_TASK:>
Description:
def stop_traffic(self, *ports):
    """ Stop traffic on list of ports.

    :param ports: list of ports to stop traffic on. Default - all session ports.
    """
    operation_ports = self._get_operation_ports(*ports)
    grouped = self._per_chassis_ports(*operation_ports)
    # Fan the request out one chassis at a time.
    for chassis, chassis_ports in grouped.items():
        chassis.stop_traffic(*chassis_ports)
<SYSTEM_TASK:>
Decorator for registering your Admin class.
<END_TASK>
<USER_TASK:>
Description:
def register(model, admin=None, category=None):
    """Decorator for registering your Admin class with the site.

    :param model: model class to register
    :param admin: unused in the visible code path (kept for interface
        compatibility)
    :param category: optional block category; registered when truthy
    """
    def _model_admin_wrapper(admin_class):
        site.register(model, admin_class=admin_class)
        if category:
            site.register_block(model, category)
        return admin_class

    return _model_admin_wrapper
<SYSTEM_TASK:>
Determine the HttpResponse for the change_view stage.
<END_TASK>
<USER_TASK:>
Description:
def response_change(self, request, obj):
"""Determine the HttpResponse for the change_view stage.""" |
opts = self.opts.app_label, self.opts.model_name
pk_value = obj._get_pk_val()
if '_continue' in request.POST:
msg = _(
'The %(name)s block was changed successfully. You may edit it again below.'
) % {'name': force_text(self.opts.verbose_name)}
... |
<SYSTEM_TASK:>
A field could be found for this term, try to get filter string for it.
<END_TASK>
<USER_TASK:>
Description:
def get_filter_item(name: str, operation: bytes, value: bytes) -> bytes:
"""
A field could be found for this term, try to get filter string for it.
""" |
assert isinstance(name, str)
assert isinstance(value, bytes)
if operation is None:
return filter_format(b"(%s=%s)", [name, value])
elif operation == "contains":
assert value != ""
return filter_format(b"(%s=*%s*)", [name, value])
else:
raise ValueError("Unknown searc... |
<SYSTEM_TASK:>
Translate the Q tree into a filter string to search for, or None
<END_TASK>
<USER_TASK:>
Description:
def get_filter(q: tldap.Q, fields: Dict[str, tldap.fields.Field], pk: str):
"""
Translate the Q tree into a filter string to search for, or None
if no results possible.
""" |
# check the details are valid
if q.negated and len(q.children) == 1:
op = b"!"
elif q.connector == tldap.Q.AND:
op = b"&"
elif q.connector == tldap.Q.OR:
op = b"|"
else:
raise ValueError("Invalid value of op found")
# scan through every child
search = []
... |
<SYSTEM_TASK:>
r"""The name of the script, callable from the command line.
<END_TASK>
<USER_TASK:>
Description:
def program_name(self):
    r"""The name of the script, callable from the command line."""
    # Split the CamelCase class name into words, lowercase each,
    # and join with hyphens (e.g. MyTool -> my-tool).
    words = uqbar.strings.delimit_words(type(self).__name__)
    return "-".join(word.lower() for word in words)
<SYSTEM_TASK:>
Function does nothing - is just ``pass`` or docstring.
<END_TASK>
<USER_TASK:>
Description:
def function_is_noop(function_node: ast.FunctionDef) -> bool:
    """
    Function does nothing - is just ``pass`` or docstring.
    """
    for statement in function_node.body:
        if not node_is_noop(statement):
            return False
    return True
<SYSTEM_TASK:>
Adds "parent" attribute to all child nodes of passed node.
<END_TASK>
<USER_TASK:>
Description:
def add_node_parents(root: ast.AST) -> None:
    """
    Adds "parent" attribute to all child nodes of passed node.

    Code taken from https://stackoverflow.com/a/43311383/1286705
    """
    for parent in ast.walk(root):
        for kid in ast.iter_child_nodes(parent):
            kid.parent = parent
<SYSTEM_TASK:>
Generates a list of lines that the passed node covers, relative to the
<END_TASK>
<USER_TASK:>
Description:
def build_footprint(node: ast.AST, first_line_no: int) -> Set[int]:
    """
    Generates a list of lines that the passed node covers, relative to the
    marked lines list - i.e. start of function is line 0.
    """
    start = get_first_token(node).start[0] - first_line_no
    # Token end line is inclusive, hence the +1 for range().
    stop = get_last_token(node).end[0] - first_line_no + 1
    return set(range(start, stop))
<SYSTEM_TASK:>
Finds all nodes that are before the ``max_line_number`` and are not
<END_TASK>
<USER_TASK:>
Description:
def filter_arrange_nodes(nodes: List[ast.stmt], max_line_number: int) -> List[ast.stmt]:
    """
    Finds all nodes that are before the ``max_line_number`` and are not
    docstrings or ``pass``.
    """
    kept = []
    for node in nodes:
        if node.lineno >= max_line_number:
            continue
        if isinstance(node, ast.Pass):
            continue
        # Skip bare string expressions (docstrings).
        if isinstance(node, ast.Expr) and isinstance(node.value, ast.Str):
            continue
        kept.append(node)
    return kept
<SYSTEM_TASK:>
Finds all nodes that are after the ``min_line_number``
<END_TASK>
<USER_TASK:>
Description:
def filter_assert_nodes(nodes: List[ast.stmt], min_line_number: int) -> List[ast.stmt]:
    """
    Finds all nodes that are after the ``min_line_number``
    """
    return list(filter(lambda node: node.lineno > min_line_number, nodes))
<SYSTEM_TASK:>
Finds all lines that contain a string in a tree, usually a function. These
<END_TASK>
<USER_TASK:>
Description:
def find_stringy_lines(tree: ast.AST, first_line_no: int) -> Set[int]:
    """
    Finds all lines that contain a string in a tree, usually a function. These
    lines will be ignored when searching for blank lines.
    """
    covered = set()
    string_nodes = (n for n in ast.walk(tree) if isinstance(n, ast.Str))
    for node in string_nodes:
        covered |= build_footprint(node, first_line_no)
    return covered
<SYSTEM_TASK:>
Run everything required for checking this function.
<END_TASK>
<USER_TASK:>
Description:
def check_all(self) -> Generator[AAAError, None, None]:
"""
Run everything required for checking this function.
Returns:
A generator of errors.
Raises:
Valida... |
# Function def
if function_is_noop(self.node):
return
self.mark_bl()
self.mark_def()
# ACT
# Load act block and kick out when none is found
self.act_node = self.load_act_node()
self.act_block = Block.build_act(self.act_node.node, self.node)
... |
<SYSTEM_TASK:>
Mark unprocessed lines that have no content and no string nodes
<END_TASK>
<USER_TASK:>
Description:
def mark_bl(self) -> int:
"""
Mark unprocessed lines that have no content and no string nodes
covering them as blank line BL.
Returns:
Number of blank lines fo... |
counter = 0
stringy_lines = find_stringy_lines(self.node, self.first_line_no)
for relative_line_number, line in enumerate(self.lines):
if relative_line_number not in stringy_lines and line.strip() == '':
counter += 1
self.line_markers[relative_line_nu... |
<SYSTEM_TASK:>
Function getParamFromEnv
<END_TASK>
<USER_TASK:>
Description:
def getParamFromEnv(self, var, default=''):
""" Function getParamFromEnv
Search a parameter in the host environment
@param var: the var name
@param hostgroup: the hostgroup item linked to this host
@par... |
if self.getParam(var):
return self.getParam(var)
if self.hostgroup:
if self.hostgroup.getParam(var):
return self.hostgroup.getParam(var)
if self.domain.getParam('password'):
return self.domain.getParam('password')
else:
ret... |
<SYSTEM_TASK:>
Function getUserData
<END_TASK>
<USER_TASK:>
Description:
def getUserData(self,
hostgroup,
domain,
defaultPwd='',
defaultSshKey='',
proxyHostname='',
tplFolder='metadata/templates/'):
... |
if 'user-data' in self.keys():
return self['user-data']
else:
self.hostgroup = hostgroup
self.domain = domain
if proxyHostname == '':
proxyHostname = 'foreman.' + domain['name']
password = self.getParamFromEnv('password', defau... |
<SYSTEM_TASK:>
Queue one or more payload for execution after its runner is started
<END_TASK>
<USER_TASK:>
Description:
def register_payload(self, *payloads, flavour: ModuleType):
    """Queue one or more payload for execution after its runner is started"""
    target_runner = self.runners[flavour]
    for item in payloads:
        self._logger.debug('registering payload %s (%s)', NameRepr(item), NameRepr(flavour))
        target_runner.register_payload(item)
<SYSTEM_TASK:>
Execute one payload after its runner is started and return its output
<END_TASK>
<USER_TASK:>
Description:
def run_payload(self, payload, *, flavour: ModuleType):
    """Execute one payload after its runner is started and return its output"""
    runner = self.runners[flavour]
    return runner.run_payload(payload)
<SYSTEM_TASK:>
Run all runners, blocking until completion or error
<END_TASK>
<USER_TASK:>
Description:
def run(self):
"""Run all runners, blocking until completion or error""" |
self._logger.info('starting all runners')
try:
with self._lock:
assert not self.running.set(), 'cannot re-run: %s' % self
self.running.set()
thread_runner = self.runners[threading]
for runner in self.runners.values():
i... |
<SYSTEM_TASK:>
Hook for specifying the form Field instance for a given database Field
<END_TASK>
<USER_TASK:>
Description:
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Hook for specifying the form Field instance for a given database Field
instance.
If kwargs are given, they'... |
formfield = super().formfield_for_dbfield(db_field, **kwargs)
if db_field.name == 'image':
formfield.widget = ImageRelatedFieldWidgetWrapper(
ImageSelect(), db_field.rel, self.admin_site, can_add_related=True,
can_change_related=True,
)
re... |
<SYSTEM_TASK:>
Compare two structures that represents JSON schemas.
<END_TASK>
<USER_TASK:>
Description:
def compare_schemas(one, two):
"""Compare two structures that represents JSON schemas.
For comparison you can't use normal comparison, because in JSON schema
lists DO NOT keep order (and Python lists do... |
one = _normalize_string_type(one)
two = _normalize_string_type(two)
_assert_same_types(one, two)
if isinstance(one, list):
return _compare_lists(one, two)
elif isinstance(one, dict):
return _compare_dicts(one, two)
elif isinstance(one, SCALAR_TYPES):
return one == two
... |
<SYSTEM_TASK:>
Check if given regex is of type ECMA 262 or not.
<END_TASK>
<USER_TASK:>
Description:
def is_ecma_regex(regex):
"""Check if given regex is of type ECMA 262 or not.
:rtype: bool
""" |
parts = regex.split('/')
if len(parts) == 1:
return False
if len(parts) < 3:
raise ValueError('Given regex isn\'t ECMA regex nor Python regex.')
parts.pop()
parts.append('')
raw_regex = '/'.join(parts)
if raw_regex.startswith('/') and raw_regex.endswith('/'):
retu... |
<SYSTEM_TASK:>
Convert ECMA 262 regex to Python tuple with regex and flags.
<END_TASK>
<USER_TASK:>
Description:
def convert_ecma_regex_to_python(value):
"""Convert ECMA 262 regex to Python tuple with regex and flags.
If given value is already Python regex it will be returned unchanged.
:param string valu... |
if not is_ecma_regex(value):
return PythonRegex(value, [])
parts = value.split('/')
flags = parts.pop()
try:
result_flags = [ECMA_TO_PYTHON_FLAGS[f] for f in flags]
except KeyError:
raise ValueError('Wrong flags "{}".'.format(flags))
return PythonRegex('/'.join(parts[... |
<SYSTEM_TASK:>
Convert Python regex to ECMA 262 regex.
<END_TASK>
<USER_TASK:>
Description:
def convert_python_regex_to_ecma(value, flags=None):
    """Convert Python regex to ECMA 262 regex.

    If given value is already ECMA regex it will be returned unchanged.

    :param string value: Python regex.
    :param list flags: list of Python ``re`` flags to encode in the result;
        defaults to no flags.
    :rtype: str
    """
    if is_ecma_regex(value):
        return value
    # Bug fix: the original used a mutable default argument (flags=[]);
    # a None sentinel is the safe equivalent.
    if flags is None:
        flags = []
    ecma_flags = ''.join(PYTHON_TO_ECMA_FLAGS[f] for f in flags)
    return '/{value}/{flags}'.format(value=value, flags=ecma_flags)
<SYSTEM_TASK:>
Get field associated with given attribute.
<END_TASK>
<USER_TASK:>
Description:
def get_field(self, field_name):
    """Get field associated with given attribute."""
    _missing = object()
    found = next(
        (field for attr_name, field in self if attr_name == field_name),
        _missing,
    )
    if found is _missing:
        raise errors.FieldNotFound('Field not found', field_name)
    return found
<SYSTEM_TASK:>
Explicitly validate all the fields.
<END_TASK>
<USER_TASK:>
Description:
def validate(self):
    """Explicitly validate all the fields."""
    for attr_name, field_obj in self:
        try:
            field_obj.validate_for_object(self)
        except ValidationError as original_error:
            # Wrap the field error with the offending field's name.
            message = "Error for field '{name}'.".format(name=attr_name)
            raise ValidationError(message, original_error)
<SYSTEM_TASK:>
Iterate over fields, but also give `structure_name`.
<END_TASK>
<USER_TASK:>
Description:
def iterate_with_name(cls):
    """Iterate over fields, but also give `structure_name`.

    Format is `(attribute_name, structue_name, field_instance)`.
    Structure name is name under which value is stored in the structure.
    """
    for attr_name, field in cls.iterate_over_fields():
        yield attr_name, field.structue_name(attr_name), field
<SYSTEM_TASK:>
Parse value to proper model type.
<END_TASK>
<USER_TASK:>
Description:
def parse_value(self, value):
    """Parse value to proper model type."""
    # Only dicts are expanded into the embedded model type;
    # everything else passes through untouched.
    if isinstance(value, dict):
        embed_type = self._get_embed_type()
        return embed_type(**value)
    return value
<SYSTEM_TASK:>
Parse string into instance of `time`.
<END_TASK>
<USER_TASK:>
Description:
def parse_value(self, value):
    """Parse string into instance of `time`."""
    # None and already-parsed time objects pass through unchanged.
    if value is None or isinstance(value, datetime.time):
        return value
    return parse(value).timetz()
<SYSTEM_TASK:>
Parse string into instance of `datetime`.
<END_TASK>
<USER_TASK:>
Description:
def parse_value(self, value):
    """Parse string into instance of `datetime`."""
    if isinstance(value, datetime.datetime):
        return value
    # Falsy values (None, '') yield None instead of a parse error.
    return parse(value) if value else None
<SYSTEM_TASK:>
Cast instance of model to python structure.
<END_TASK>
<USER_TASK:>
Description:
def to_struct(model):
    """Cast instance of model to python structure.

    :param model: Model to be casted.
    :rtype: ``dict``
    """
    model.validate()
    result = {}
    for _, structure_name, field in model.iterate_with_name():
        raw = field.__get__(model)
        # None values are omitted from the output structure.
        if raw is not None:
            result[structure_name] = field.to_struct(raw)
    return result
<SYSTEM_TASK:>
Prepare bloom for existing checks
<END_TASK>
<USER_TASK:>
Description:
def __prepare_bloom(self):
    """Prepare bloom for existing checks

    Streams the update-key columns of the table and seeds a scalable
    bloom filter with one tuple per existing row.
    """
    self.__bloom = pybloom_live.ScalableBloomFilter()
    key_columns = [getattr(self.__table.c, name) for name in self.__update_keys]
    existing = select(key_columns).execution_options(stream_results=True).execute()
    for existing_key in existing:
        self.__bloom.add(tuple(existing_key))
<SYSTEM_TASK:>
Check if row exists in table
<END_TASK>
<USER_TASK:>
Description:
def __check_existing(self, row):
    """Check if row exists in table

    Uses the bloom filter as a probabilistic membership test; a new key is
    recorded so later duplicates of the same row are detected.
    """
    if self.__update_keys is None:
        return False
    key = tuple(row[name] for name in self.__update_keys)
    if key in self.__bloom:
        return True
    self.__bloom.add(key)
    return False
<SYSTEM_TASK:>
Create SQL comment from field's title and description
<END_TASK>
<USER_TASK:>
Description:
def _get_field_comment(field, separator=' - '):
    """
    Create SQL comment from field's title and description

    :param field: tableschema-py Field, with optional 'title' and 'description' values
    :param separator: string placed between title and description
    """
    descriptor = field.descriptor
    title = descriptor.get('title') or ''
    description = descriptor.get('description') or ''
    return _get_comment(description, title, separator)
<SYSTEM_TASK:>
Restore bucket from SQL
<END_TASK>
<USER_TASK:>
Description:
def restore_bucket(self, table_name):
    """Restore bucket from SQL

    Strips the storage prefix from a table name; returns None for tables
    that do not belong to this storage (no prefix match).
    """
    prefix = self.__prefix
    if not table_name.startswith(prefix):
        return None
    return table_name.replace(prefix, '', 1)
<SYSTEM_TASK:>
Restore descriptor from SQL
<END_TASK>
<USER_TASK:>
Description:
def restore_descriptor(self, table_name, columns, constraints, autoincrement_column=None):
"""Restore descriptor from SQL
""" |
# Fields
fields = []
for column in columns:
if column.name == autoincrement_column:
continue
field_type = self.restore_type(column.type)
field = {'name': column.name, 'type': field_type}
if not column.nullable:
fie... |
<SYSTEM_TASK:>
Restore row from SQL
<END_TASK>
<USER_TASK:>
Description:
def restore_row(self, row, schema):
    """Restore row from SQL

    Casts each cell through its schema field; on PostgreSQL, array/object
    columns are left as-is (already native types).
    """
    values = list(row)
    is_postgres = self.__dialect == 'postgresql'
    for position, field in enumerate(schema.fields):
        if is_postgres and field.type in ('array', 'object'):
            continue
        values[position] = field.cast_value(values[position])
    return values
<SYSTEM_TASK:>
Restore type from SQL
<END_TASK>
<USER_TASK:>
Description:
def restore_type(self, type):
"""Restore type from SQL
""" |
# All dialects
mapping = {
ARRAY: 'array',
sa.Boolean: 'boolean',
sa.Date: 'date',
sa.DateTime: 'datetime',
sa.Float: 'number',
sa.Integer: 'integer',
JSONB: 'object',
JSON: 'object',
sa.Numeric... |
<SYSTEM_TASK:>
CreateFileType
<END_TASK>
<USER_TASK:>
Description:
def open_hierarchy(self, path, relative_to_object_id, object_id, create_file_type=0):
    """
    CreateFileType
    0 - Creates no new object.
    1 - Creates a notebook with the specified name at the specified location.
    (further values per the OneNote CreateFileType enumeration — see API docs)
    """
    try:
        result = self.process.OpenHierarchy(path, relative_to_object_id, "", create_file_type)
    except Exception as error:
        # Best-effort COM call: report and fall through (returns None).
        print(error)
        print("Could not Open Hierarchy")
    else:
        return result
<SYSTEM_TASK:>
NewPageStyle
<END_TASK>
<USER_TASK:>
Description:
def create_new_page(self, section_id, new_page_style=0):
    """
    NewPageStyle
    0 - Create a page that has the default page style
    1 - Create a blank page with no title
    2 - Create a blank page that has no title
    """
    try:
        self.process.CreateNewPage(section_id, "", new_page_style)
    except Exception as error:
        # Best-effort COM call: report the failure instead of raising.
        print(error)
        print("Unable to create the page")
<SYSTEM_TASK:>
PageInfo
<END_TASK>
<USER_TASK:>
Description:
def get_page_content(self, page_id, page_info=0):
    """
    PageInfo
    0 - Returns only basic page content, without selection markup and binary data objects. This is the standard value to pass.
    (higher values add selection markup and/or binary data — see the
    OneNote PageInfo enumeration)
    """
    try:
        content = self.process.GetPageContent(page_id, "", page_info)
    except Exception as error:
        # Best-effort COM call: report and fall through (returns None).
        print(error)
        print("Could not get Page Content")
    else:
        return content
<SYSTEM_TASK:>
SpecialLocation
<END_TASK>
<USER_TASK:>
Description:
def get_special_location(self, special_location=0):
    """
    SpecialLocation
    0 - Gets the path to the Backup Folders folder location.
    1 - Gets the path to the Unfiled Notes folder location.
    2 - Gets the path to the default notebook folder location.
    """
    try:
        location = self.process.GetSpecialLocation(special_location)
    except Exception as error:
        # Best-effort COM call: report and fall through (returns None).
        print(error)
        print("Could not retreive special location")
    else:
        return location
<SYSTEM_TASK:>
Determine memory specifications of the machine.
<END_TASK>
<USER_TASK:>
Description:
def memory():
    """Determine memory specifications of the machine.

    Returns
    -------
    mem_info : dictionary
        Holds the current values for the total, free and used memory of the system.
    """
    snapshot = psutil.virtual_memory()._asdict()
    return {name: int(amount) for name, amount in snapshot.items()}
<SYSTEM_TASK:>
Given a two-dimensional array with a dimension of size 'N',
<END_TASK>
<USER_TASK:>
Description:
def get_chunk_size(N, n):
"""Given a two-dimensional array with a dimension of size 'N',
determine the number of rows or columns that can fit into memory.
Parameters
----------
N : ... |
mem_free = memory()['free']
if mem_free > 60000000:
chunk_size = int(((mem_free - 10000000) * 1000) / (4 * n * N))
return chunk_size
elif mem_free > 40000000:
chunk_size = int(((mem_free - 7000000) * 1000) / (4 * n * N))
return chunk_size
elif mem_free > 14000000:
... |
<SYSTEM_TASK:>
Compute a weighted average of the mutual information with the known labels,
<END_TASK>
<USER_TASK:>
Description:
def ceEvalMutual(cluster_runs, cluster_ensemble = None, verbose = False):
"""Compute a weighted average of the mutual information with the known labels,
the weights being proport... |
if cluster_ensemble is None:
return 0.0
if reduce(operator.mul, cluster_runs.shape, 1) == max(cluster_runs.shape):
cluster_runs = cluster_runs.reshape(1, -1)
weighted_average_mutual_information = 0
N_labelled_indices = 0
for i in range(cluster_runs.shape[0]):
labelled_i... |
<SYSTEM_TASK:>
Ensure that a cluster labelling is in a valid format.
<END_TASK>
<USER_TASK:>
Description:
def checkcl(cluster_run, verbose = False):
"""Ensure that a cluster labelling is in a valid format.
Parameters
----------
cluster_run : array of shape (n_samples,)
A vector of cluster IDs... |
cluster_run = np.asanyarray(cluster_run)
if cluster_run.size == 0:
raise ValueError("\nERROR: Cluster_Ensembles: checkcl: "
"empty vector provided as input.\n")
elif reduce(operator.mul, cluster_run.shape, 1) != max(cluster_run.shape):
raise ValueError("\nERRO... |
<SYSTEM_TASK:>
Alter a vector of cluster labels to a dense mapping.
<END_TASK>
<USER_TASK:>
Description:
def one_to_max(array_in):
"""Alter a vector of cluster labels to a dense mapping.
Given that this function is herein always called after passing
a vector to the function checkcl, one_to_max re... |
x = np.asanyarray(array_in)
N_in = x.size
array_in = x.reshape(N_in)
sorted_array = np.sort(array_in)
sorting_indices = np.argsort(array_in)
last = np.nan
current_index = -1
for i in range(N_in):
if last != sorted_array[i] or np.isnan(last):
last = sorted_... |
<SYSTEM_TASK:>
Check that a matrix is a proper similarity matrix and bring
<END_TASK>
<USER_TASK:>
Description:
def checks(similarities, verbose = False):
"""Check that a matrix is a proper similarity matrix and bring
appropriate changes if applicable.
Parameters
----------
similarities : arr... |
if similarities.size == 0:
raise ValueError("\nERROR: Cluster_Ensembles: checks: the similarities "
"matrix provided as input happens to be empty.\n")
elif np.where(np.isnan(similarities))[0].size != 0:
raise ValueError("\nERROR: Cluster_Ensembles: checks: input si... |
<SYSTEM_TASK:>
METIS algorithm by Karypis and Kumar. Partitions the induced similarity graph
<END_TASK>
<USER_TASK:>
Description:
def metis(hdf5_file_name, N_clusters_max):
    """METIS algorithm by Karypis and Kumar. Partitions the induced similarity graph
    passed by CSPA.

    :param hdf5_file_name: HDF5 store holding the similarity graph data
    :param N_clusters_max: number of partitions requested
    :return: cluster labels from sgraph
    """
    graph_file = wgraph(hdf5_file_name)
    cluster_labels = sgraph(N_clusters_max, graph_file)
    # Remove the temporary graph file once partitioning is done.
    subprocess.call(['rm', graph_file])
    return cluster_labels
<SYSTEM_TASK:>
Gives cluster labels ranging from 1 to N_clusters_max for
<END_TASK>
<USER_TASK:>
Description:
def hmetis(hdf5_file_name, N_clusters_max, w=None):
    """Gives cluster labels ranging from 1 to N_clusters_max for
    hypergraph partitioning required for HGPA.

    :param hdf5_file_name: HDF5 store holding the hypergraph data
    :param N_clusters_max: number of partitions requested
    :param w: optional hyperedge weights; selects the graph mode
    :return: dense cluster labels
    """
    # Mode 2 = unweighted hyperedges, mode 3 = weighted.
    graph_file = wgraph(hdf5_file_name, None, 2) if w is None else wgraph(hdf5_file_name, w, 3)
    raw_labels = sgraph(N_clusters_max, graph_file)
    dense_labels = one_to_max(raw_labels)
    # Remove the temporary graph file once partitioning is done.
    subprocess.call(['rm', graph_file])
    return dense_labels
<SYSTEM_TASK:>
Obfuscate the auth details to avoid easy snatching.
<END_TASK>
<USER_TASK:>
Description:
def obfuscate(p, action):
"""Obfuscate the auth details to avoid easy snatching.
It's best to use a throw away account for these alerts to avoid having
your authentication put at risk by storing it local... |
key = "ru7sll3uQrGtDPcIW3okutpFLo6YYtd5bWSpbZJIopYQ0Du0a1WlhvJOaZEH"
s = list()
if action == 'store':
if PY2:
for i in range(len(p)):
kc = key[i % len(key)]
ec = chr((ord(p[i]) + ord(kc)) % 256)
s.append(ec)
return base64.urlsa... |
<SYSTEM_TASK:>
Go through and establish the defaults on the file system.
<END_TASK>
<USER_TASK:>
Description:
def _config_bootstrap(self):
"""Go through and establish the defaults on the file system.
The approach here was stolen from the CLI tool provided with the
module. Idea being that the us... |
if not os.path.exists(CONFIG_PATH):
os.makedirs(CONFIG_PATH)
if not os.path.exists(CONFIG_FILE):
json.dump(CONFIG_DEFAULTS, open(CONFIG_FILE, 'w'), indent=4,
separators=(',', ': '))
config = CONFIG_DEFAULTS
if self._email and self._password:... |
<SYSTEM_TASK:>
Attempt to authenticate the user through a session file.
<END_TASK>
<USER_TASK:>
Description:
def _session_check(self):
"""Attempt to authenticate the user through a session file.
This process is done to avoid having to authenticate the user every
single time. It uses a session f... |
if not os.path.exists(SESSION_FILE):
self._log.debug("Session file does not exist")
return False
with open(SESSION_FILE, 'rb') as f:
cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
self._session.cookies = cookies
self._log.debug("... |
<SYSTEM_TASK:>
Override the default log level of the class
<END_TASK>
<USER_TASK:>
Description:
def set_log_level(self, level):
    """Override the default log level of the class

    :param level: a level name ('info', 'debug', 'error', ... — case
        insensitive) or a numeric logging level. Unknown names raise
        ValueError via logging.Logger.setLevel, matching the original
        behaviour for unrecognized strings.
    """
    if isinstance(level, str):
        # Generalizes the original 'info'/'debug'/'error' if-chain:
        # any standard level name is mapped to its numeric value.
        level = logging.getLevelName(level.upper())
    self._log.setLevel(level)
<SYSTEM_TASK:>
Process the application state configuration.
<END_TASK>
<USER_TASK:>
Description:
def _process_state(self):
"""Process the application state configuration.
Google Alerts manages the account information and alert data through
some custom state configuration. Not all values have be... |
self._log.debug("Capturing state from the request")
response = self._session.get(url=self.ALERTS_URL, headers=self.HEADERS)
soup = BeautifulSoup(response.content, "html.parser")
for i in soup.findAll('script'):
if i.text.find('window.STATE') == -1:
continue
... |
<SYSTEM_TASK:>
Authenticate the user and setup our state.
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self):
"""Authenticate the user and setup our state.""" |
valid = self._session_check()
if self._is_authenticated and valid:
self._log.debug("[!] User has already authenticated")
return
init = self._session.get(url=self.LOGIN_URL, headers=self.HEADERS)
soup = BeautifulSoup(init.content, "html.parser")
soup_login... |
<SYSTEM_TASK:>
List alerts configured for the account.
<END_TASK>
<USER_TASK:>
Description:
def list(self, term=None):
"""List alerts configured for the account.""" |
if not self._state:
raise InvalidState("State was not properly obtained from the app")
self._process_state()
if not self._state[1]:
self._log.info("No monitors have been created yet.")
return list()
monitors = list()
for monitor in self._stat... |
<SYSTEM_TASK:>
Let's handle old-style response processing here, as usual.
<END_TASK>
<USER_TASK:>
Description:
def process_response(self, request, response):
"""Let's handle old-style response processing here, as usual.""" |
# For debug only.
if not settings.DEBUG:
return response
# Check for responses where the data can't be inserted.
content_encoding = response.get('Content-Encoding', '')
content_type = response.get('Content-Type', '').split(';')[0]
if any((getattr(response, ... |
<SYSTEM_TASK:>
Mark a cached item invalid and trigger an asynchronous
<END_TASK>
<USER_TASK:>
Description:
def invalidate(self, *raw_args, **raw_kwargs):
"""
Mark a cached item invalid and trigger an asynchronous
job to refresh the cache
""" |
args = self.prepare_args(*raw_args)
kwargs = self.prepare_kwargs(**raw_kwargs)
key = self.key(*args, **kwargs)
item = self.cache.get(key)
if item is not None:
expiry, data = item
self.store(key, self.timeout(*args, **kwargs), data)
self.async_... |
<SYSTEM_TASK:>
Remove an item from the cache
<END_TASK>
<USER_TASK:>
Description:
def delete(self, *raw_args, **raw_kwargs):
    """
    Remove an item from the cache
    """
    args = self.prepare_args(*raw_args)
    kwargs = self.prepare_kwargs(**raw_kwargs)
    cache_key = self.key(*args, **kwargs)
    # Only issue the delete when the key is actually present.
    if self.cache.get(cache_key) is not None:
        self.cache.delete(cache_key)
<SYSTEM_TASK:>
Manually set the cache value with its appropriate expiry.
<END_TASK>
<USER_TASK:>
Description:
def set(self, *raw_args, **raw_kwargs):
"""
Manually set the cache value with its appropriate expiry.
""" |
if self.set_data_kwarg in raw_kwargs:
data = raw_kwargs.pop(self.set_data_kwarg)
else:
raw_args = list(raw_args)
data = raw_args.pop()
args = self.prepare_args(*raw_args)
kwargs = self.prepare_kwargs(**raw_kwargs)
key = self.key(*args, **kwa... |
<SYSTEM_TASK:>
Add a result to the cache
<END_TASK>
<USER_TASK:>
Description:
def store(self, key, expiry, data):
"""
Add a result to the cache
:key: Cache key to use
:expiry: The expiry timestamp after which the result is stale
:data: The data to cache
""" |
self.cache.set(key, (expiry, data), self.cache_ttl)
if getattr(settings, 'CACHEBACK_VERIFY_CACHE_WRITE', True):
# We verify that the item was cached correctly. This is to avoid a
# Memcache problem where some values aren't cached correctly
# without warning.
... |
<SYSTEM_TASK:>
Fetch the result SYNCHRONOUSLY and populate the cache
<END_TASK>
<USER_TASK:>
Description:
def refresh(self, *args, **kwargs):
    """
    Fetch the result SYNCHRONOUSLY and populate the cache
    """
    result = self.fetch(*args, **kwargs)
    cache_key = self.key(*args, **kwargs)
    self.store(cache_key, self.expiry(*args, **kwargs), result)
    return result
<SYSTEM_TASK:>
Trigger an asynchronous job to refresh the cache
<END_TASK>
<USER_TASK:>
Description:
def async_refresh(self, *args, **kwargs):
"""
Trigger an asynchronous job to refresh the cache
""" |
# We trigger the task with the class path to import as well as the
# (a) args and kwargs for instantiating the class
# (b) args and kwargs for calling the 'refresh' method
try:
enqueue_task(
dict(
klass_str=self.class_path,
... |
<SYSTEM_TASK:>
Return whether to refresh an item synchronously when it is found in the
<END_TASK>
<USER_TASK:>
Description:
def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs):
    """
    Return whether to refresh an item synchronously when it is found in the
    cache but stale.

    :param delta: Staleness measure of the cached item.
        NOTE(review): presumably the time elapsed relative to the item's
        expiry point -- confirm against the caller.
    :returns: ``False`` when no ``fetch_on_stale_threshold`` is configured;
        otherwise ``True`` once ``delta`` exceeds the threshold window
        (``fetch_on_stale_threshold - lifetime``).
    """
    if self.fetch_on_stale_threshold is None:
        return False
    return delta > (self.fetch_on_stale_threshold - self.lifetime)
<SYSTEM_TASK:>
Return the cache key to use.
<END_TASK>
<USER_TASK:>
Description:
def key(self, *args, **kwargs):
"""
Return the cache key to use.
If you're passing anything but primitive types to the ``get`` method,
it's likely that you'll need to override this method.
""" |
if not args and not kwargs:
return self.class_path
try:
if args and not kwargs:
return "%s:%s" % (self.class_path, self.hash(args))
# The line might break if your passed values are un-hashable. If
# it does, you need to override this meth... |
<SYSTEM_TASK:>
Generate a hash of the given iterable.
<END_TASK>
<USER_TASK:>
Description:
def hash(self, value):
    """
    Return an MD5 hex digest of the given iterable, for use in a cache key.
    """
    parts = value
    if is_iterable(parts):
        # Convert every element to bytes so the join below is well-defined.
        parts = tuple(to_bytestring(element) for element in parts)
    return hashlib.md5(six.b(':').join(parts)).hexdigest()
<SYSTEM_TASK:>
Re-populate cache using the given job class.
<END_TASK>
<USER_TASK:>
Description:
def perform_async_refresh(cls, klass_str, obj_args, obj_kwargs, call_args, call_kwargs):
"""
Re-populate cache using the given job class.
The job class is instantiated with the passed constructor ar... |
klass = get_job_class(klass_str)
if klass is None:
logger.error("Unable to construct %s with args %r and kwargs %r",
klass_str, obj_args, obj_kwargs)
return
logger.info("Using %s with constructor args %r and kwargs %r",
klass... |
<SYSTEM_TASK:>
Decorate function to cache its return value.
<END_TASK>
<USER_TASK:>
Description:
def cacheback(lifetime=None, fetch_on_miss=None, cache_alias=None,
job_class=None, task_options=None, **job_class_kwargs):
"""
Decorate function to cache its return value.
:lifetime: How long to c... |
if job_class is None:
job_class = FunctionJob
job = job_class(lifetime=lifetime, fetch_on_miss=fetch_on_miss,
cache_alias=cache_alias, task_options=task_options,
**job_class_kwargs)
def _wrapper(fn):
# using available_attrs to work around http://bugs... |
<SYSTEM_TASK:>
Return the angle in radians between vectors 'v1' and 'v2'.
<END_TASK>
<USER_TASK:>
Description:
def angle(v1, v2):
    """Return the angle in radians between vectors 'v1' and 'v2'."""
    # Clamp the cosine into [-1, 1] to guard against floating-point drift
    # before taking the arccos.
    cos_theta = np.dot(unit_vector(v1), unit_vector(v2))
    return np.arccos(np.clip(cos_theta, -1.0, 1.0))
<SYSTEM_TASK:>
Keep vertices with angles higher than the given minimum.
<END_TASK>
<USER_TASK:>
Description:
def keep_high_angle(vertices, min_angle_deg):
"""Keep vertices with angles higher then given minimum.""" |
accepted = []
v = vertices
v1 = v[1] - v[0]
accepted.append((v[0][0], v[0][1]))
for i in range(1, len(v) - 2):
v2 = v[i + 1] - v[i - 1]
diff_angle = np.fabs(angle(v1, v2) * 180.0 / np.pi)
if diff_angle > min_angle_deg:
accepted.append((v[i][0], v[i][1]))
... |
<SYSTEM_TASK:>
Transform matplotlib.contourf to geojson with overlapping filled contours.
<END_TASK>
<USER_TASK:>
Description:
def contourf_to_geojson_overlap(contourf, geojson_filepath=None, min_angle_deg=None,
ndigits=5, unit='', stroke_width=1, fill_opacity=.9,
... |
polygon_features = []
contourf_idx = 0
for collection in contourf.collections:
color = collection.get_facecolor()
for path in collection.get_paths():
for coord in path.to_polygons():
if min_angle_deg:
coord = keep_high_angle(coord, min_angle_d... |
<SYSTEM_TASK:>
Get a qualified URL for the provider to return to upon authorization
<END_TASK>
<USER_TASK:>
Description:
def get_authorize_callback(endpoint, provider_id):
    """Get a qualified URL for the provider to return to upon authorization.

    :param endpoint: Absolute path to append to the application's host
    :param provider_id: The provider ID the callback URL is built for
    """
    endpoint_prefix = config_value('BLUEPRINT_NAME')
    url = url_for(endpoint_prefix + '.' + endpoint, provider_id=provider_id)
    # request.url_root ends with '/'; strip it so the absolute path joins cleanly.
    return request.url_root[:-1] + url
<SYSTEM_TASK:>
Starts the provider login OAuth flow
<END_TASK>
<USER_TASK:>
Description:
def login(provider_id):
    """Start the provider's OAuth login flow and redirect to its consent page."""
    provider = get_provider_or_404(provider_id)
    # Remember where to send the user after a successful OAuth round-trip.
    redirect_target = request.form.get('next', get_post_login_redirect())
    session[config_value('POST_OAUTH_LOGIN_SESSION_KEY')] = redirect_target
    return provider.authorize(get_authorize_callback('login', provider_id))
<SYSTEM_TASK:>
Starts the provider connection OAuth flow
<END_TASK>
<USER_TASK:>
Description:
def connect(provider_id):
"""Starts the provider connection OAuth flow""" |
provider = get_provider_or_404(provider_id)
callback_url = get_authorize_callback('connect', provider_id)
allow_view = get_url(config_value('CONNECT_ALLOW_VIEW'))
pc = request.form.get('next', allow_view)
session[config_value('POST_OAUTH_CONNECT_SESSION_KEY')] = pc
return provider.authorize(cal... |
<SYSTEM_TASK:>
Remove all connections for the authenticated user to the
<END_TASK>
<USER_TASK:>
Description:
def remove_all_connections(provider_id):
"""Remove all connections for the authenticated user to the
specified provider
""" |
provider = get_provider_or_404(provider_id)
ctx = dict(provider=provider.name, user=current_user)
deleted = _datastore.delete_connections(user_id=current_user.get_id(),
provider_id=provider_id)
if deleted:
after_this_request(_commit)
msg = (... |
<SYSTEM_TASK:>
Remove a specific connection for the authenticated user to the
<END_TASK>
<USER_TASK:>
Description:
def remove_connection(provider_id, provider_user_id):
"""Remove a specific connection for the authenticated user to the
specified provider
""" |
provider = get_provider_or_404(provider_id)
ctx = dict(provider=provider.name, user=current_user,
provider_user_id=provider_user_id)
deleted = _datastore.delete_connection(user_id=current_user.get_id(),
provider_id=provider_id,
... |
<SYSTEM_TASK:>
Shared method to handle the connection process
<END_TASK>
<USER_TASK:>
Description:
def connect_handler(cv, provider):
"""Shared method to handle the connection process
:param connection_values: A dictionary containing the connection values
:param provider_id: The provider ID the connection ... |
cv.setdefault('user_id', current_user.get_id())
connection = _datastore.find_connection(
provider_id=cv['provider_id'], provider_user_id=cv['provider_user_id'])
if connection is None:
after_this_request(_commit)
connection = _datastore.create_connection(**cv)
msg = ('Connec... |
<SYSTEM_TASK:>
Shared method to handle the signin process
<END_TASK>
<USER_TASK:>
Description:
def login_handler(response, provider, query):
"""Shared method to handle the signin process""" |
connection = _datastore.find_connection(**query)
if connection:
after_this_request(_commit)
token_pair = get_token_pair_from_oauth_response(provider, response)
if (token_pair['access_token'] != connection.access_token or
token_pair['secret'] != connection.secret):
... |
<SYSTEM_TASK:>
Initialize the application with the Social extension
<END_TASK>
<USER_TASK:>
Description:
def init_app(self, app, datastore=None):
"""Initialize the application with the Social extension
:param app: The Flask application
:param datastore: Connection datastore instance
""" |
datastore = datastore or self.datastore
for key, value in default_config.items():
app.config.setdefault(key, value)
providers = dict()
for key, config in app.config.items():
if not key.startswith('SOCIAL_') or config is None or key in default_config:
... |
<SYSTEM_TASK:>
Creates a Postman object with TLS and Auth
<END_TASK>
<USER_TASK:>
Description:
def postman(host, port=587, auth=(None, None),
            force_tls=False, options=None):
    """
    Create a Postman object with TLS and Auth middleware.

    TLS is placed before authentication because credentials should only
    be sent over an encrypted channel.

    :param host: SMTP server hostname.
    :param port: SMTP port; defaults to 587, the mail submission port.
    :param auth: ``(username, password)`` pair for the auth middleware.
    :param force_tls: Whether to require TLS.
    :param options: Optional mapping of extra keyword arguments forwarded
        to ``Postman``; may be ``None``.
    """
    return Postman(
        host=host,
        port=port,
        middlewares=[
            middleware.tls(force=force_tls),
            middleware.auth(*auth),
        ],
        # Bug fix: splatting the default ``options=None`` directly
        # (``**options``) raises TypeError; substitute an empty dict.
        **(options or {})
    )
<SYSTEM_TASK:>
Returns the finalised mime object, after
<END_TASK>
<USER_TASK:>
Description:
def mime(self):
    """
    Build and return the finalised MIME object, after applying the
    internal headers. Usually this is not to be overridden.
    """
    message = self.mime_object()
    self.headers.prepare(message)
    return message
<SYSTEM_TASK:>
Try to find existing model class named `model_name`.
<END_TASK>
<USER_TASK:>
Description:
def get_existing_model(model_name):
    """ Look up an already-generated model class by name.

    :param model_name: String name of the model class.
    :returns: The model class, or ``None`` when no such model exists.
    """
    try:
        existing = engine.get_document_cls(model_name)
    except ValueError:
        # No document class registered under this name.
        log.debug('Model `{}` does not exist'.format(model_name))
        return None
    log.debug('Model `{}` already exists. Using existing one'.format(
        model_name))
    return existing
<SYSTEM_TASK:>
Create referenced model if it doesn't exist.
<END_TASK>
<USER_TASK:>
Description:
def prepare_relationship(config, model_name, raml_resource):
""" Create referenced model if it doesn't exist.
When preparing a relationship, we check to see if the model that will be
referenced already exists. ... |
if get_existing_model(model_name) is None:
plural_route = '/' + pluralize(model_name.lower())
route = '/' + model_name.lower()
for res in raml_resource.root.resources:
if res.method.upper() != 'POST':
continue
if res.path.endswith(plural_route) or res... |
<SYSTEM_TASK:>
Generate model class.
<END_TASK>
<USER_TASK:>
Description:
def generate_model_cls(config, schema, model_name, raml_resource,
es_based=True):
""" Generate model class.
Engine DB field types are determined using `type_fields` and only those
types may be used.
:param... |
from nefertari.authentication.models import AuthModelMethodsMixin
base_cls = engine.ESBaseDocument if es_based else engine.BaseDocument
model_name = str(model_name)
metaclass = type(base_cls)
auth_model = schema.get('_auth_model', False)
bases = []
if config.registry.database_acls:
... |
<SYSTEM_TASK:>
Generates model name and runs `setup_data_model` to get
<END_TASK>
<USER_TASK:>
Description:
def handle_model_generation(config, raml_resource):
    """ Generates model name and runs `setup_data_model` to get
    or generate actual model class.

    :param config: Pyramid Configurator instance.
    :param raml_resource: Instance of ramlfications.raml.ResourceNode
        for which a model is generated.
    :raises ValueError: Re-raised with the model name prefixed so the
        error identifies which model failed.
    """
    model_name = generate_model_name(raml_resource)
    try:
        return setup_data_model(config, raml_resource, model_name)
    except ValueError as ex:
        raise ValueError('{}: {}'.format(model_name, str(ex)))
<SYSTEM_TASK:>
Set up model event subscribers.
<END_TASK>
<USER_TASK:>
Description:
def setup_model_event_subscribers(config, model_cls, schema):
""" Set up model event subscribers.
:param config: Pyramid Configurator instance.
:param model_cls: Model class for which handlers should be connected.
:para... |
events_map = get_events_map()
model_events = schema.get('_event_handlers', {})
event_kwargs = {'model': model_cls}
for event_tag, subscribers in model_events.items():
type_, action = event_tag.split('_')
event_objects = events_map[type_][action]
if not isinstance(event_objects... |
<SYSTEM_TASK:>
Set up model fields' processors.
<END_TASK>
<USER_TASK:>
Description:
def setup_fields_processors(config, model_cls, schema):
""" Set up model fields' processors.
:param config: Pyramid Configurator instance.
:param model_cls: Model class for field of which processors should be
set u... |
properties = schema.get('properties', {})
for field_name, props in properties.items():
if not props:
continue
processors = props.get('_processors')
backref_processors = props.get('_backref_processors')
if processors:
processors = [resolve_to_callable(va... |
<SYSTEM_TASK:>
Setup Pyramid AuthTktAuthenticationPolicy.
<END_TASK>
<USER_TASK:>
Description:
def _setup_ticket_policy(config, params):
""" Setup Pyramid AuthTktAuthenticationPolicy.
Notes:
* Initial `secret` params value is considered to be a name of config
param that represents a cookie name.
... |
from nefertari.authentication.views import (
TicketAuthRegisterView, TicketAuthLoginView,
TicketAuthLogoutView)
log.info('Configuring Pyramid Ticket Authn policy')
if 'secret' not in params:
raise ValueError(
'Missing required security scheme settings: secret')
para... |
<SYSTEM_TASK:>
Setup `nefertari.ApiKeyAuthenticationPolicy`.
<END_TASK>
<USER_TASK:>
Description:
def _setup_apikey_policy(config, params):
""" Setup `nefertari.ApiKeyAuthenticationPolicy`.
Notes:
* User may provide model name in :params['user_model']: do define
the name of the user model.
... |
from nefertari.authentication.views import (
TokenAuthRegisterView, TokenAuthClaimView,
TokenAuthResetView)
log.info('Configuring ApiKey Authn policy')
auth_model = config.registry.auth_model
params['check'] = auth_model.get_groups_by_token
params['credentials_callback'] = auth_mod... |
<SYSTEM_TASK:>
Setup authentication, authorization policies.
<END_TASK>
<USER_TASK:>
Description:
def setup_auth_policies(config, raml_root):
""" Setup authentication, authorization policies.
Performs basic validation to check all the required values are present
and performs authentication, authorization p... |
log.info('Configuring auth policies')
secured_by_all = raml_root.secured_by or []
secured_by = [item for item in secured_by_all if item]
if not secured_by:
log.info('API is not secured. `secured_by` attribute '
'value missing.')
return
secured_by = secured_by[0]
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.