text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def taskfileinfo_descriptor_data(tfi, role):
    """Return the descriptor of a task file for display/edit roles.

    :param tfi: the :class:`jukeboxcore.filesys.TaskFileInfo` holds the data
    :type tfi: :class:`jukeboxcore.filesys.TaskFileInfo`
    :param role: item data role
    :type role: QtCore.Qt.ItemDataRole
    :returns: data for the descriptor
    :rtype: depending on role
    :raises: None
    """
    if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
        return tfi.descriptor
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pick(self):
    """Pick a value according to the given density.

    Draws ``v`` uniformly from ``[0, self.ub]`` and binary-searches the
    cumulative table ``self.dist`` — presumably a list of
    ``(value, cumulative_bound)`` pairs of length ``self.vc`` — for the
    entry whose interval contains ``v``.

    NOTE(review): ``s = s / 2`` relies on Python 2 integer division;
    under Python 3 this yields floats — confirm before porting.
    """
    v = random.uniform(0, self.ub)
    d = self.dist
    c = self.vc - 1  # candidate index, starts at the last entry
    s = self.vc      # step size, halved each iteration
    while True:
        s = s / 2
        if s == 0:
            break
        if v <= d[c][1]:
            # current bound already covers v: search left
            c -= s
        else:
            c += s
        # we only need this logic when increasing c
        while len(d) <= c:
            # stepped past the end of the table: back off in halves
            s = s / 2
            c -= s
            if s == 0:
                break
    # we may have converged from the left, instead of the right
    if c == len(d) or v <= d[c][1]:
        c -= 1
    return d[c][0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_output_fields(self, output_fields):
    """Configure where the extractor's dictionary output lands in the doc.

    A dict remaps extracted keys to document fields, a list filters the
    keys to keep, and a plain string selects a single scalar output
    field (stored on ``output_field``).

    :raises ValueError: for any other argument type
    :returns: self, for chaining
    """
    if isinstance(output_fields, (dict, list)):
        self.output_fields = output_fields
    elif isinstance(output_fields, basestring):
        # scalar case uses the singular attribute on purpose
        self.output_field = output_fields
    else:
        raise ValueError("set_output_fields requires a dictionary of "
                         + "output fields to remap, a list of keys to filter, or a scalar string")
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_jp(self, extractor_processor, sub_output=None):
    """Return the JSONPath addressing an upstream processor's output.

    Prefers a name-filtered JSONPath when the upstream processor is
    named; otherwise falls back to its metadata-filtered JSONPath.

    :param extractor_processor: upstream ExtractorProcessor to read from
    :param sub_output: optional specific output field to address
    :raises ValueError: when neither sub_output nor a scalar
        output_field is available to disambiguate the path
    """
    if sub_output is None and extractor_processor.output_field is None:
        raise ValueError(
            "ExtractorProcessors input paths cannot be unioned across fields. Please specify either a sub_output or use a single scalar output_field")
    # Compute the name-based path once instead of calling the builder
    # twice (the original rebuilt the whole filter string a second time
    # just to return it).
    named_jp = extractor_processor.get_output_jsonpath_with_name(sub_output)
    if named_jp is not None:
        return named_jp
    return extractor_processor.get_output_jsonpath(sub_output)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_extractor_processor_inputs(self, extractor_processors, sub_output=None):
    """Wire this processor's inputs to upstream ExtractorProcessors.

    Instead of naming fields in the source document, derive the input
    JSONPaths from the output fields of processors that run earlier in
    the chain.

    :param extractor_processors: a single ExtractorProcessor, or a list
        whose entries are ExtractorProcessors or nested lists of them
        (a nested list is unioned into one ``(a)|(b)|...`` JSONPath)
    :param sub_output: optional specific upstream output field to read
    :raises ValueError: if the argument is neither an
        ExtractorProcessor nor a list
    :returns: self, for chaining
    """
    if not (isinstance(extractor_processors, ExtractorProcessor) or
            isinstance(extractor_processors, types.ListType)):
        raise ValueError(
            "extractor_processors must be an ExtractorProcessor or a list")
    if isinstance(extractor_processors, ExtractorProcessor):
        extractor_processor = extractor_processors
        self.input_fields = self.__get_jp(extractor_processor, sub_output)
    elif isinstance(extractor_processors, types.ListType):
        self.input_fields = list()
        for extractor_processor in extractor_processors:
            if isinstance(extractor_processor, ExtractorProcessor):
                self.input_fields.append(
                    self.__get_jp(extractor_processor, sub_output))
            elif isinstance(extractor_processor, list):
                # Nested list means "any of these": union the paths into
                # a single '|'-joined JSONPath expression.
                self.input_fields.append(
                    reduce(lambda a, b: "{}|{}".format(a, b),
                           ["({})".format(self.__get_jp(x, sub_output))
                            for x in extractor_processor]))
    self.generate_json_paths()
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_output_jsonpath_field(self, sub_output=None):
    """Resolve which output field a JSONPath should address.

    Without ``sub_output`` the scalar ``output_field`` is returned.
    With it, the value is validated against this processor's configured
    ``output_fields`` (dict values or list entries).

    :raises ValueError: if sub_output is not produced by this processor
    """
    if sub_output is None:
        return self.output_field
    emits_sub_output = True
    if self.output_fields is None:
        emits_sub_output = False
    elif isinstance(self.output_fields, dict):
        emits_sub_output = sub_output in self.output_fields.itervalues()
    elif isinstance(self.output_fields, list):
        emits_sub_output = sub_output in self.output_fields
    if not emits_sub_output:
        raise ValueError(
            "Cannot generate output jsonpath because this ExtractorProcessor will not output {}".format(sub_output))
    return sub_output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_output_jsonpath_with_name(self, sub_output=None):
    """Return a name-filtered JSONPath over this processor's output.

    :returns: the JSONPath string, or None when this processor has no
        name to filter on
    """
    if self.name is None:
        return None
    field = self.get_output_jsonpath_field(sub_output)
    name_filter = "name='{}'".format(self.name)
    return "{}[?{}].(result[*][value])".format(field, name_filter)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_output_jsonpath(self, sub_output=None):
    """Build a JSONPath filter capturing this processor's outputs.

    The filter conjoins every string/list metadata entry of the wrapped
    extractor (plus a 'source' entry naming the input fields) so the
    path matches only results written by this exact processor.

    :param sub_output: optional specific output field to address
    :returns: JSONPath of the form ``field[?k1="v1" & k2=[...]].result.value``
    """
    output_jsonpath_field = self.get_output_jsonpath_field(sub_output)
    metadata = self.extractor.get_metadata()
    metadata['source'] = str(self.input_fields)
    extractor_filter = ""
    is_first = True
    for key, value in metadata.iteritems():
        # Join successive conditions with ' & ' (skipped before the first).
        if is_first:
            is_first = False
        else:
            extractor_filter = extractor_filter + " & "
        if isinstance(value, basestring):
            # Replace unescaped double quotes with single quotes so the
            # value fits inside the double-quoted filter literal.
            extractor_filter = extractor_filter\
                + "{}=\"{}\"".format(key,
                    re.sub('(?<=[^\\\])\"', "'", value))
        elif isinstance(value, types.ListType):
            extractor_filter = extractor_filter\
                + "{}={}".format(key, str(value))
        # NOTE(review): metadata values of other types are silently
        # skipped (their separator may still be emitted) — confirm
        # metadata only ever holds strings and lists.
    output_jsonpath = "{}[?{}].result.value".format(
        output_jsonpath_field, extractor_filter)
    return output_jsonpath
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_input_fields(self, input_fields):
    """Set the document fields feeding this extractor and parse them.

    :param input_fields: a scalar string or an ordered list of strings
    :raises ValueError: for any other type
    :returns: self, for chaining
    """
    if isinstance(input_fields, (basestring, types.ListType)):
        self.input_fields = input_fields
        self.generate_json_paths()
        return self
    raise ValueError("input_fields must be a string or a list")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_json_paths(self):
    """Parse ``self.input_fields`` (scalar or list of strings) into JSONPaths.

    Sets ``self.jsonpaths`` to a single parsed path for scalar input,
    or a list of parsed paths for list input; a one-element list is
    collapsed back to a scalar path.
    """
    if isinstance(self.input_fields, basestring):
        try:
            self.jsonpaths = parse(self.input_fields)
        except Exception as exception:
            # Surface which expression failed before re-raising.
            print "input_fields failed {}".format(self.input_fields)
            raise exception
    elif isinstance(self.input_fields, types.ListType):
        self.jsonpaths = list()
        for input_field in self.input_fields:
            self.jsonpaths.append(parse(input_field))
        # A one-element list degrades to a scalar jsonpath.
        if len(self.jsonpaths) == 1:
            self.jsonpaths = self.jsonpaths[0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def insert_extracted_value(self, doc, extracted_value, output_field, original_output_field=None):
    """Insert the extracted value into ``doc`` at ``output_field``.

    The value is wrapped in a copy of the extractor's metadata (under
    'result'), then placed at the dotted/indexed path described by
    ``output_field`` (e.g. ``a.b[0].c``).  Existing content at the leaf
    is preserved: a dict becomes a two-element list, a list is appended
    to.

    :param doc: document to mutate
    :param extracted_value: value(s) produced by the extractor
    :param output_field: dotted path (with optional [..] indexing) to
        the target field
    :param original_output_field: pre-remap key name, recorded in the
        metadata when given
    :returns: doc on the empty-value path; otherwise None (mutates doc)
    """
    if not extracted_value:
        return doc
    metadata = self.extractor.get_metadata()
    if not self.extractor.get_include_context():
        # Without context, wrap raw value(s) as {'value': ...} records.
        if isinstance(extracted_value, list):
            result = list()
            for ev in extracted_value:
                result.append({'value': ev})
        else:
            result = {'value': extracted_value}
    else:
        # With context the extractor already shaped the result.
        result = extracted_value
    metadata['result'] = result
    metadata['source'] = str(self.input_fields)
    if original_output_field is not None:
        metadata['original_output_field'] = original_output_field
    if self.name is not None:
        metadata['name'] = self.name
    # Walk all but the last path element, descending into dicts and
    # (for numeric [..] parts) lists, creating missing dict levels.
    field_elements = output_field.split('.')
    while len(field_elements) > 1:
        field_element = field_elements.pop(0)
        if '[' in field_element:
            if not field_element.startswith('['):
                # 'name[...' → descend into 'name' first
                array_field_elements = field_element.split('[', 1)
                array_field_element = array_field_elements[0]
                doc = doc[array_field_element]
                field_element = array_field_elements[1]
            array_elements = field_element.split(']')
            for array_element in array_elements:
                if not array_element:
                    continue
                if array_element.startswith('['):
                    array_element = array_element[1:]
                # Numeric index into a list, otherwise a dict key.
                if array_element.isdigit() and isinstance(doc, list):
                    doc = doc[int(array_element)]
                else:
                    doc = doc[array_element]
        else:
            if field_element not in doc:
                doc[field_element] = {}
            doc = doc[field_element]
    # Leaf: merge with any existing content instead of clobbering.
    field_element = field_elements[0]
    if field_element in doc:
        output = doc[field_element]
        if isinstance(output, dict):
            output = [output, metadata]
        elif isinstance(output, types.ListType):
            output.append(metadata)
        # NOTE(review): an existing scalar leaf is written back
        # unchanged and the new metadata is dropped — confirm intended.
    else:
        output = [metadata]
    doc[field_element] = output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def extract_from_renamed_inputs(self, doc, renamed_inputs):
    """Run the wrapped extractor on ``renamed_inputs`` and fold the
    result into ``doc`` under the configured output field(s).

    :param doc: document to enrich (mutated in place)
    :param renamed_inputs: inputs shaped the way the extractor expects
    :returns: doc in every case — previously only the empty-extraction
        path returned it, leaving successful calls returning None
    """
    extracted_value = self.extractor.extract(renamed_inputs)
    if not extracted_value:
        # Nothing extracted: leave the document untouched.
        return doc
    if self.output_fields is not None and isinstance(extracted_value, dict):
        if isinstance(self.output_fields, list):
            # List form filters: only the listed keys are inserted.
            for field in self.output_fields:
                if field in extracted_value:
                    self.insert_extracted_value(
                        doc, extracted_value[field], field)
        elif isinstance(self.output_fields, dict):
            # Dict form remaps: extracted key -> renamed document field.
            for key, value in self.output_fields.iteritems():
                if key in extracted_value:
                    self.insert_extracted_value(
                        doc, extracted_value[key], value, key)
    else:
        # Scalar output (or no remapping configured).
        self.insert_extracted_value(
            doc, extracted_value, self.output_field)
    # Fix: return the document consistently on all paths.
    return doc
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def db_for_write(self, model, **hints):
    """Route writes for the 'duashttp' app to its dedicated database.

    :returns: the duashttp route prefix for duashttp models, else None
    :raises ImproperlyConfigured: when duashttp writes are disabled
    """
    if model._meta.app_label != 'duashttp':
        return None
    if not DUAS_ENABLE_DB_WRITE:
        raise ImproperlyConfigured(
            "Set `DUAS_ENABLE_DB_WRITE` to True in your settings to enable "
            "write operations on unity asset server database"
        )
    return DUAS_DB_ROUTE_PREFIX
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def allow_migrate(self, db, model):
    """Keep the duashttp app pinned to the 'duashttp' database.

    :returns: True/False when this router has an opinion, None to defer
    """
    is_duashttp_model = model._meta.app_label == 'duashttp'
    if db == DUAS_DB_ROUTE_PREFIX:
        return is_duashttp_model
    if is_duashttp_model:
        return False
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sometimes(fn):
    """ They've done studies, you know. 50% of the time, it works every time.

    Decorator: runs ``fn`` (returning its result) on every odd-numbered
    call; even-numbered calls do nothing and return None.  The call
    counter lives on the wrapper as ``wrapped.x``.
    """
    from functools import wraps

    @wraps(fn)  # preserve fn's name/docstring for introspection
    def wrapped(*args, **kwargs):
        wrapped.x += 1
        if wrapped.x % 2 == 1:
            return fn(*args, **kwargs)
    wrapped.x = 0
    return wrapped
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def percent_of_the_time(p):
    """Decorator factory: the wrapped function has a ``p`` percent
    chance of actually executing on each call (its result is
    discarded)."""
    def decorator(func):
        def guarded(*args, **kwargs):
            should_run = in_percentage(p)
            if should_run:
                func(*args, **kwargs)
        return guarded
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def rarely(fn):
    """Decorator: call ``fn`` only 5% of the time (result discarded)."""
    def guarded(*args, **kwargs):
        should_run = in_percentage(5)
        if should_run:
            fn(*args, **kwargs)
    return guarded
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mostly(fn):
    """Decorator: call ``fn`` 95% of the time (result discarded)."""
    def guarded(*args, **kwargs):
        should_run = in_percentage(95)
        if should_run:
            fn(*args, **kwargs)
    return guarded
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def times(x, y):
    """ Do something a random amount of times between x & y.

    Decorator factory: each call of the wrapped function executes it
    ``n`` times, with ``n`` drawn uniformly from [x, y] inclusive.
    Return values are discarded.
    """
    def decorator(fn):
        def wrapped(*args, **kwargs):
            n = random.randint(x, y)
            # Fix: range(n), not range(1, n) — the original executed fn
            # only n - 1 times, undershooting the requested count.
            for _ in range(n):
                fn(*args, **kwargs)
        return wrapped
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pformat_tokens(self, tokens):
    """Format a tokenized BASIC program line. Useful for debugging.

    :returns: a list of formatted string lines, one per token value
    """
    lines = []
    for value in self.iter_token_values(tokens):
        char = self.token2ascii(value)
        # Two-byte tokens get a wider hex field than one-byte tokens.
        if value > 0xff:
            template = "\t$%04x -> %s"
        else:
            template = "\t $%02x -> %s"
        lines.append(template % (value, repr(char)))
    return lines
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_destinations(self, ascii_listing):
    """Return all line numbers that are used as jump targets.

    Scans every line of the listing with ``self.renum_regex`` and
    collects the comma-separated numbers captured by the 'no' group.

    :param ascii_listing: the BASIC listing as text
    :returns: sorted list of int line numbers
    """
    self.destinations = set()

    def collect_destinations(matchobj):
        numbers = matchobj.group("no")
        if numbers:
            self.destinations.update(set(
                [n.strip() for n in numbers.split(",")]
            ))
        # Fix: re.sub requires the callback to return a string; the
        # original returned None, raising TypeError on the first match.
        # Returning the match unchanged keeps the scan side-effect only.
        return matchobj.group(0)

    for line in self._iter_lines(ascii_listing):
        # Only the collection side effect matters; result is discarded.
        self.renum_regex.sub(collect_destinations, line)
    return sorted([int(no) for no in self.destinations if no])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_cell_format(column_dict, key=None):
    """Return the cell format for the given column.

    :param column_dict: the column datas collected during inspection
    :param key: the exportation key (currently unused, kept for API parity)
    :returns: the explicit 'format' entry when present, otherwise a
        format resolved from the sqlalchemy column type, else None
    """
    fmt = column_dict.get('format')
    prop = column_dict.get('__col__')
    if fmt is not None or prop is None:
        return fmt
    if not hasattr(prop, 'columns'):
        return fmt
    sqla_column = prop.columns[0]
    # TypeDecorator columns expose the underlying type via 'impl'.
    column_type = getattr(sqla_column.type, 'impl', sqla_column.type)
    return FORMAT_REGISTRY.get_item(column_type)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save_book(self, f_buf=None):
    """Serialize the workbook into a file buffer.

    :param obj f_buf: a buffer supporting write and seek; a fresh
        StringIO is created when omitted
    :returns: the buffer, rewound to the start
    """
    if f_buf is None:
        f_buf = StringIO.StringIO()
    book_data = openpyxl.writer.excel.save_virtual_workbook(self.book)
    f_buf.write(book_data)
    f_buf.seek(0)
    return f_buf
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_color(self, cell, color):
    """ Set the given color to the provided cell

        cell

            A xls cell object

        color

            A openpyxl color var
    """
    # openpyxl styles are immutable here: build a copy with the new
    # font color and reassign it rather than mutating in place.
    cell.style = cell.style.copy(font=Font(color=Color(rgb=color)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def format_row(self, row):
    """Convert a dict row into a list ordered like ``self.headers``.

    Missing keys become empty strings; a ``format_<name>`` method on
    self, when present, post-processes the corresponding value.
    """
    values = []
    for header in getattr(self, 'headers', []):
        name = header['name']
        value = row.get(name, '')
        formatter = getattr(self, "format_%s" % name, None)
        if formatter is not None:
            value = formatter(value)
        values.append(value)
    return values
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _render_rows(self):
    """ Render the rows in the current stylesheet """
    # Data and headers may be absent on a partially configured writer.
    _datas = getattr(self, '_datas', ())
    headers = getattr(self, 'headers', ())
    for index, row in enumerate(_datas):
        # Row 1 is reserved for the header line; data starts at row 2.
        row_number = index + 2
        for col_num, value in enumerate(row):
            # openpyxl cells are 1-indexed by column.
            cell = self.worksheet.cell(row=row_number, column=col_num + 1)
            if value is not None:
                cell.value = value
            else:
                cell.value = ""
            # Apply the column's number format when one is configured.
            if len(headers) > col_num:
                header = headers[col_num]
                format = get_cell_format(header)
                if format is not None:
                    cell.number_format = format
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_related_exporter(self, related_obj, column):
    """ returns an SqlaXlsExporter for the given related object and
    stores it in the column object as a cache """
    result = column.get('sqla_xls_exporter')
    if result is None:
        # First encounter of this relation: give it its own worksheet,
        # named after the column label.
        worksheet = self.book.create_sheet(
            title=column.get('label', 'default title')
        )
        # Cache the exporter on the column so subsequent rows reuse it.
        result = column['sqla_xls_exporter'] = SqlaXlsExporter(
            related_obj.__class__,
            worksheet=worksheet
        )
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _populate(self):
    """ Enhance the default populate script by handling related elements """
    XlsWriter._populate(self)
    # Recurse into per-relation sub-exporters so their sheets are
    # filled as well.
    for header in self.headers:
        if "sqla_xls_exporter" in header:
            header['sqla_xls_exporter']._populate()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_table_schema(self, tname):
    ''' Returns a copy of the column-name list of the provided table.

    :raises ValueError: if the table is unknown to the schema store
    '''
    tname = self._check_tname(tname, noload=True)
    try:
        return list(self._schemas[tname])
    except KeyError:
        raise ValueError('Table "%s" not found in schema store' % tname)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(self, table_names=None, table_schemas=None, table_rowgens=None):
    '''
    Initiates the tables, schemas and record generators for this database.
    Parameters
    ----------
    table_names : list of str, str or None
        List of tables to load into this database. If `auto_load` is true, inserting a record
        into a new table not provided here will automatically create that table.
    table_schemas : dict of <table_name, column_list> or None
        Dictionary with each table name as a key and a list of its columns as value. Any keys
        present here but not present in `table_names` will also trigger table creation, so
        table names provided in both parameters are redundant but harmless.
    table_rowgens: dict of <table_name, function> or None
        For all tables present in the keys of the provided dictionary, when an insert operation
        occurs, the corresponding function is called. The function must return a dictionary and
        is used as a "base record" which is complemented by the actual record being inserted.
        For example, when a table has a rowgen like `lambda: {"Timestamp": time.ctime()}` and
        a record like `{"Name": "John"}` is inserted, the database will then contain a record
        like `{"Timestamp": "Sun Jan 10 08:36:12 2016", "Name": "John"}`.
    Raises
    ------
    ValueError
        If none of the parameters is provided, if a schema is required but
        missing, or if loading would overwrite an existing persistent table.
    '''
    # Check for table schemas
    if table_schemas is not None:
        table_schemas = self._check_case_dict(table_schemas, warn=True)
        for schema_key, schema_value in table_schemas.items():
            table_schemas[schema_key] = self._check_columns(schema_value, add_id=True)
    elif not self.dynamic_schema:
        raise ValueError('Table schemas must be provided if dynamic schema is disabled')
    # Check for row generators
    if table_rowgens is not None:
        table_rowgens = self._check_case_dict(table_rowgens, warn=True)
    # If table_names is not directly provided, infer it from one of the other parameters
    if table_names is None:
        if table_schemas is not None:
            table_names = list(table_schemas.keys())
            self._print(
                'Inferring table name from table_schemas for tables %r' % table_names)
        elif table_rowgens is not None:
            table_names = list(table_rowgens.keys())
            self._print(
                'Inferring table name from table_rowgens for tables %r' % table_names)
        else:
            req_params = 'table_names,table_schemas,table_rowgens'
            raise ValueError(
                'At least one of the parameters must be provided: [%s]' % req_params)
    table_names = self._check_table_names(table_names, warn=True)
    self._print('Loading tables %r' % table_names)
    # Update schemas and row generators without losing previous ones
    for tname in table_names:
        if table_schemas is not None and tname in table_schemas:
            self._schemas[tname] = list(table_schemas[tname])  # make a copy
        if table_rowgens is not None and tname in table_rowgens:
            self._rowgens[tname] = table_rowgens[tname]
    with self._lock:
        for tname in table_names:
            # Standardize case, since Windows paths are case insensitive
            tname = self._check_case_str(tname, warn=True)
            # CSV has same filename as table under database folder
            tpath = os.path.join(self.root_dir, self.name, tname + '.csv')
            # Table already exists, simply load it
            if os.path.isfile(tpath):
                if self.auto_load:
                    dataframe = read_csv(tpath, dtype=str)
                    self._db[tname] = dataframe
                    schema = self._check_columns(dataframe.columns.tolist())
                    self._schemas[tname] = schema
                elif self.persistent:
                    # Fix: trailing space so the wrapped literal does not
                    # render as "...wouldbe overwritten".
                    raise ValueError(
                        'Auto load tables is disabled but table "%s" already exists and would '
                        'be overwritten' % tname)
            # Table not found, try to create it using given schema
            elif table_schemas is not None and tname in self._schemas:
                self._db[tname] = DataFrame(columns=self._schemas[tname], dtype=str)
            # Table not found, dynamic schema
            elif self.dynamic_schema:
                self._print('Creating table "%s" using dynamic schema' % tname)
                self._db[tname] = DataFrame(columns=self._blank_schema, dtype=str)
                self._schemas[tname] = list(self._blank_schema)
            # Table not found and schema not given when dynamic_schema not enabled
            else:
                raise ValueError(
                    'Table %s not found and schema was not passed as a parameter' % tname)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def drop_all(self):
    ''' Drops all tables from this database.

    For persistent databases the (now empty) database folder is also
    removed; removal failures are logged rather than raised.
    '''
    self.drop(self.get_table_names())
    if self.persistent:
        with self._lock:
            try:
                dbfolder = os.path.join(self.root_dir, self.name)
                # Only remove the folder once it is empty.
                if os.path.exists(dbfolder) and not os.listdir(dbfolder):
                    rmtree(dbfolder)
            except (IOError, OSError):
                # Fix: the original caught WindowsError, a name that only
                # exists on Windows — evaluating the except clause on any
                # other platform raised NameError. OSError covers it (and
                # IOError is kept for Python 2, where it is distinct).
                # The original's `finally: del self` was also dropped: it
                # only unbound the local name and had no effect.
                self._print('Failed to delete folder %s when dropping database' % self.name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find(self, tname, where=None, where_not=None, columns=None, astype=None):
    '''
    Find records in the provided table from the database. If no records are found, return empty
    list, str or dataframe depending on the value of `astype`.
    Parameters
    ----------
    tname : str
        Table to search records from.
    where : dict or None (default `None`)
        Dictionary of <column, value> where value can be of str type for exact match or a
        compiled regex expression for more advanced matching.
    where_not : dict or None (default `None`)
        Identical to `where` but for negative-matching.
    columns: list of str, str or None (default `None`)
        Column(s) to return for the found records, if any.
    astype: str, type or None (default `None`)
        Type to cast the output to. Possible values are: `nonetype`, `dataframe`, `str`,
        `dict`, `json`. If this is `None`, falls back to the type provided to the constructor.
        If a type was provided to the constructor but the user wants to avoid any casting,
        "nonetype" should be passed as the value.
    Returns
    -------
    records : str, list or dataframe
        Output type depends on `astype` parameter.
    Examples
    --------
    >>> db = PandasDatabase("test")
    >>> db.insert("test", record={"Name": "John"})
    Name                                      John
    __id__    dc876999-1f5b-4262-b6bf-c23b875f3a54
    dtype: object
    >>> db.find("test", astype="dict")
    [{'Name': 'John', '__id__': 'dc876999-1f5b-4262-b6bf-c23b875f3a54'}]
    >>> db.find("test", astype="dataframe")
                                     __id__  Name
    0  dc876999-1f5b-4262-b6bf-c23b875f3a54  John
    >>> db.find("test", astype=None)
                                     __id__  Name
    0  dc876999-1f5b-4262-b6bf-c23b875f3a54  John
    >>> db.find("test", where={"Name": "John"}, astype="dict")
    [{'Name': 'John', '__id__': 'dc876999-1f5b-4262-b6bf-c23b875f3a54'}]
    >>> db.find("test", where_not={"Name": "John"}, astype="dict")
    []
    '''
    try:
        # Find is inherently read-only so don't try to autoload table
        tname = self._check_tname(tname, noload=True)
    except ValueError:
        # Unknown table behaves as an empty result instead of raising.
        return self._output(DataFrame(), astype=astype)
    # Normalize conditions/columns into their canonical shapes.
    where = PandasDatabase._check_conditions(where)
    where_not = PandasDatabase._check_conditions(where_not)
    columns = PandasDatabase._check_type_iter(str, columns)
    dataframe = self._db[tname]
    # Project requested columns first (only when there is data to slice).
    if len(columns) > 0 and len(dataframe) > 0:
        dataframe = dataframe[columns]
    # Parse the conditions to match
    if len(where) > 0:
        dataframe = dataframe[self._get_condition_mask(dataframe, where)]
    # Parse the conditions not to match
    if len(where_not) > 0:
        dataframe = dataframe[~self._get_condition_mask(dataframe, where_not)]
    self._print('Found %d records in table "%s" where %r and where not %r'
                % (len(dataframe), tname, where, where_not))
    return self._output(dataframe, astype=astype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_one(self, tname, where=None, where_not=None, columns=None, astype=None):
    '''
    Return a single record from the provided table, or an empty
    dict/str/series (depending on `astype`) when nothing matches.
    When several records match, the first one in internal order wins.
    Parameters
    ----------
    tname : str
        Table to search records from.
    where : dict or None (default `None`)
        Dictionary of <column, value> where value can be a str for
        exact match or a compiled regex for advanced matching.
    where_not : dict or None (default `None`)
        Identical to `where` but for negative-matching.
    columns: list of str, str or None (default `None`)
        Column(s) to return for the found record, if any.
    astype: str, type or None (default `None`)
        Output cast: `nonetype`, `series`, `str`, `dict` or `json`.
        `None` falls back to the type given to the constructor; pass
        "nonetype" to suppress casting entirely.
    Returns
    -------
    record : str, dict or series
        Output type depends on `astype` parameter.
    '''
    matches = self.find(
        tname,
        where=where,
        where_not=where_not,
        columns=columns,
        astype='dataframe',
    )
    return self._output(matches, single=True, astype=astype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def insert(self, tname, record=None, columns=None, astype=None):
    '''
    Inserts record into the provided table from the database. Returns inserted record as
    list, str or series depending on the value of `astype`.
    Parameters
    ----------
    tname : str
        Table to insert records into.
    record : dict or None (default `None`)
        <column, value> pairs to insert. A unique `__id__` column is
        always added, and a table rowgen (when registered) supplies
        base fields that the record's own keys override.
    columns: list of str, str or None (default `None`)
        Column(s) to return for the inserted records.
    astype: str, type or None (default `None`)
        Type to cast the output to. Possible values are: `nonetype`, `series`, `str`, `dict`
        `json`. If this is `None`, falls back to the type provided to the constructor.
        If a type was provided to the constructor but the user wants to avoid any casting,
        "nonetype" should be passed as the value.
    Returns
    -------
    record : str, dict or series
        Inserted record. Output type depends on `astype` parameter.
    Examples
    --------
    >>> db = PandasDatabase("test")
    >>> db.insert("test", record={"Name": "John"})
    Name                                      John
    __id__    dc876999-1f5b-4262-b6bf-c23b875f3a54
    dtype: object
    '''
    tname = self._check_tname(tname)
    record = PandasDatabase._check_dict_type(str, str, record, cast=self.auto_cast)
    columns = PandasDatabase._check_type_iter(str, columns)
    # Every inserted record gets a fresh unique identifier.
    record[self._id_colname] = str(uuid.uuid4())
    # If a row generation function exists for this table, use that
    record_new = {}
    if tname in self._rowgens:
        self._print('Using row generator to create new record in "%s"' % tname)
        record_new = self._rowgens[tname]()
    # Set as many fields as provided in new record, leave the rest as-is
    if record is not None:
        for field_key, field_val in record.items():
            record_new[field_key] = field_val
    with self._lock:
        self._print('Inserting new record into "%s": %r' % (tname, record_new))
        # Grow the table schema to accommodate any new columns.
        self._update_schema(tname, record_new.keys())
        row = Series(record_new)
        # Append at the next positional index.
        self._db[tname].loc[len(self._db[tname])] = row
        # Save the changes to disk if required
        if self.auto_save:
            self.save()
    if len(columns) > 0:
        row = row[columns]
    return self._output(row, single=True, astype=astype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def upsert(self, tname, record=None, where=None, where_not=None, columns=None, astype=None):
    '''
    Attempts to update records in the provided table from the database. If none are found,
    inserts new record that would match all the conditions. Returns updated or inserted record
    as list, dict, str, dataframe or series depending on the value of `astype`.
    Parameters
    ----------
    tname : str
        Table to update or insert records into.
    record : dict or None (default `None`)
        <column, value> pairs to write into the matched records (or
        into the newly created record when nothing matches).
    where : dict or None (default `None`)
        Dictionary of <column, value> where value can be of str type for exact match or a
        compiled regex expression for more advanced matching.
    where_not : dict or None (default `None`)
        Identical to `where` but for negative-matching.
    columns: list of str, str or None (default `None`)
        Column(s) to return for the updated or inserted records.
    astype: str, type or None (default `None`)
        Type to cast the output to. Possible values are: `nonetype`, `dataframe`, `series`,
        `str`, `dict`, `json`. If this is `None`, falls back to the type provided to the
        constructor. If a type was provided to the constructor but the user wants to avoid any
        casting, "nonetype" should be passed as the value.
    Returns
    -------
    records : list, dict, str, dataframe or series
        Updated or inserted records. Output type depends on `astype` parameter.
    Examples
    --------
    >>> db = PandasDatabase("test")
    >>> db.upsert("test", record={"Name": "John", "Color": "Blue"})
    Color                                     Blue
    Name                                      John
    __id__    a8f31bdd-8e57-4fa7-96f6-e6b20bf7a9dc
    dtype: object
    >>> db.upsert("test", where={"Name": "Jane", "Color": "Red"})
    Color                                      Red
    Name                                      Jane
    __id__    65c3bc2b-020c-48f0-b448-5fdb4e548abe
    dtype: object
    >>> db.upsert("test", record={"Color": "Yellow"}, where={"Name": "John"})
                                     __id__  Name   Color
    0  a8f31bdd-8e57-4fa7-96f6-e6b20bf7a9dc  John  Yellow
    '''
    tname = self._check_tname(tname)
    where = PandasDatabase._check_conditions(where)
    where_not = PandasDatabase._check_conditions(where_not)
    columns = PandasDatabase._check_type_iter(str, columns)
    record = PandasDatabase._check_dict_type(str, str, record, cast=self.auto_cast)
    # Attempt search only if where conditions are given
    if (where is not None and len(where) > 0) \
            or (where_not is not None and len(where_not) > 0):
        ixs = self.find(tname, where=where, where_not=where_not, astype='dataframe').index
        # If no records matched the where conditions, default to insert
        if len(ixs) == 0:
            self._print(
                'Warning: No records in "%s" matched the conditions %s' % (tname, where))
            # Add all the key-value pairs from the where condition
            # (conditions are normalized to lists; take the first value).
            for cond_key, cond_value in where.items():
                record[cond_key] = cond_value[0] if len(cond_value) > 0 else None
            # Create a new record
            record_new = self.insert(tname, record=record, columns=columns, astype='series')
            # If the default value of the column provided in where_not conflicts, error out
            if where_not is not None and any([record_new[cond_key] in cond_value
                                              for cond_key, cond_value in where_not.items()]):
                # Roll the insert back before raising.
                _id = PandasDatabase._id_colname
                self.delete(tname, where={_id: record_new[_id]})
                raise ValueError('Cannot insert new record because default values conflict '
                                 'with conditions provided: %s' % where_not)
            # Otherwise return created record
            return self._output(record_new, astype=astype)
        # If existing record(s) must be updated
        elif len(ixs) > 0:
            self._print('Updating %d record(s) in "%s" where %r and where not %r'
                        % (len(ixs), tname, where, where_not))
            with self._lock:
                # Grow the schema for any new columns, then overwrite
                # the given fields on every matched row.
                self._update_schema(tname, record.keys())
                for field_key, field_val in record.items():
                    self._db[tname].loc[ixs, field_key] = field_val
            # Save the changes to disk if required
            if self.auto_save:
                self.save()
            # Return updated records
            rows = self._db[tname].loc[ixs]
            if len(columns) > 0:
                rows = rows[columns]
            return self._output(rows, astype=astype)
    # Insert if no where conditions are given
    else:
        # Return the new record
        new_record = self.insert(tname, record=record, columns=columns, astype='series')
        return self._output(new_record, astype=astype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _extract_params(request_dict, param_list, param_fallback=False):
    ''' Extract pddb parameters from request.

    Builds a query dict from `request_dict` keeping only keys named in
    `param_list`.  Keys of the form ``param__sub`` are folded into
    nested dicts ``{param: {sub: value}}``, and the literal string
    ``"__null__"`` is converted into Python None at any nesting level.

    Parameters
    ----------
    request_dict : dict
        Raw request parameters.
    param_list : list of str
        Names of the parameters to extract.
    param_fallback : bool (default False)
        When True and no parameter matched, the entire request_dict is
        assumed to be the value of the first parameter.

    Returns
    -------
    dict
        Filtered (possibly nested) parameter dictionary.
    '''
    if not param_list or not request_dict:
        return dict()

    def _nullify(d):
        # Recursively convert the "__null__" marker into Python None.
        return {k: (_nullify(v) if isinstance(v, dict) else
                    (None if v == '__null__' else v))
                for k, v in d.items()}

    query = dict()
    for param in param_list:
        # Retrieve all items in the form of {param: value} and
        # convert {param__key: value} into {param: {key: value}}
        for query_key, query_value in request_dict.items():
            if param == query_key:
                query[param] = query_value
            else:
                query_key_parts = query_key.split('__', 1)
                if param == query_key_parts[0]:
                    # Fix: merge into an existing sub-dict instead of
                    # overwriting it, so several param__X keys survive.
                    existing = query.get(param)
                    if isinstance(existing, dict):
                        existing[query_key_parts[1]] = query_value
                    else:
                        query[param] = {query_key_parts[1]: query_value}
    # When fallback is enabled and no parameter matched, assume query refers to first parameter
    if param_fallback and all([param_key not in query.keys() for param_key in param_list]):
        query = {param_list[0]: dict(request_dict)}
    # Return a dictionary with only the requested parameters
    return {k: v for k, v in _nullify(query).items() if k in param_list}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sha1(s):
    """Return the hex-encoded SHA-1 digest of the given string or bytes.

    hashlib only accepts bytes on Python 3, so text input is UTF-8
    encoded first; bytes input is hashed as-is.

    :param s: data to hash (str or bytes)
    :returns: 40-character hexadecimal digest string
    """
    if isinstance(s, str):
        s = s.encode('utf-8')
    h = hashlib.new('sha1')
    h.update(s)
    return h.hexdigest()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_translated_items(fapi, file_uri, use_cache, cache_dir=None):
    """Return the last-modified items for a file from smartling.

    Results are optionally cached on disk (keyed by the SHA-1 of the
    file URI) to avoid repeated API calls.

    :param fapi: smartling file API client
    :param file_uri: URI of the file on smartling
    :param use_cache: when True, read/write the on-disk cache
    :param cache_dir: directory holding cache files (required for caching)
    :returns: list of last-modified items
    """
    items = None
    # Guard against a missing cache_dir so os.path.join cannot blow up
    # (the old code joined against None when use_cache was set alone).
    cache_file = os.path.join(cache_dir, sha1(file_uri)) if (use_cache and cache_dir) else None
    if cache_file and os.path.exists(cache_file):
        print("Using cache file %s for translated items for: %s" % (cache_file, file_uri))
        items = json.loads(read_from_file(cache_file))
    if not items:
        print("Downloading %s from smartling" % file_uri)
        (response, code) = fapi.last_modified(file_uri)
        items = response.data.items
        if cache_file:
            print("Caching %s to %s" % (file_uri, cache_file))
            write_to_file(cache_file, json.dumps(items))
    return items
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_translated_file(fapi, file_uri, locale, retrieval_type, include_original_strings, use_cache, cache_dir=None):
    """Return the translated content of a file from smartling.

    Content is optionally cached on disk; on a cache miss the file is
    downloaded from smartling and (when possible) written to the cache.

    :param fapi: smartling file API client
    :param file_uri: URI of the file on smartling
    :param locale: locale to fetch the translation for
    :param retrieval_type: smartling retrievalType parameter
    :param include_original_strings: smartling includeOriginalStrings parameter
    :param use_cache: when True, try the on-disk cache before downloading
    :param cache_dir: directory holding cache files (required for caching)
    :returns: the translated file content, or None if no translation exists
    """
    file_data = None
    cache_name = str(file_uri)+"."+str(locale)+"."+str(retrieval_type)+"."+str(include_original_strings)
    cache_file = os.path.join(cache_dir, sha1(cache_name)) if cache_dir else None
    # The extra cache_file check keeps os.path.exists from being called
    # with None when use_cache is set but no cache_dir was given.
    if use_cache and cache_file and os.path.exists(cache_file):
        print("Using cache file %s for %s translation file: %s" % (cache_file, locale, file_uri))
        file_data = read_from_file(cache_file)
    if not file_data:
        # Cache miss (or caching disabled): download from smartling. The
        # previous code only downloaded when use_cache was False, so a
        # cold cache could never be populated.
        (file_data, code) = fapi.get(file_uri, locale,
            retrievalType=retrieval_type,
            includeOriginalStrings=include_original_strings)
        file_data = str(file_data).strip()
        if cache_file and code == 200 and len(file_data) > 0:
            print("Caching to %s for %s translation file: %s" % (cache_file, locale, file_uri))
            write_to_file(cache_file, file_data)
    if not file_data or len(file_data) == 0:
        print("%s translation not found for %s" % (locale, file_uri))
        return None
    return file_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def upload_file(fapi, file_name, conf):
    """Upload a source file to smartling.

    :param fapi: smartling file API client
    :param file_name: path of the file to upload
    :param conf: per-file configuration (must contain 'file-type')
    :raises SmarterlingError: when 'file-type' is missing or the upload fails
    """
    # `in` replaces the long-deprecated dict.has_key and behaves identically.
    if 'file-type' not in conf:
        raise SmarterlingError("%s doesn't have a file-type" % file_name)
    print("Uploading %s to smartling" % file_name)
    data = UploadData(
        os.path.dirname(file_name)+os.sep,
        os.path.basename(file_name),
        conf.get('file-type'))
    data.setUri(file_uri(file_name, conf))
    # The smartling API expects string booleans, not Python bools.
    if 'approve-content' in conf:
        data.setApproveContent("true" if conf.get('approve-content', True) else "false")
    if 'callback-url' in conf:
        data.setCallbackUrl(conf.get('callback-url'))
    for name, value in conf.get('directives', {}).items():
        data.addDirective(SmartlingDirective(name, value))
    (response, code) = fapi.upload(data)
    if code != 200:
        print(repr(response))
        raise SmarterlingError("Error uploading file: %s" % file_name)
    else:
        print("Uploaded %s, wordCount: %s, stringCount: %s" % (file_name, response.data.wordCount, response.data.stringCount))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_file_api(conf):
    """Create a SmartlingFileApi client from the given configuration.

    Credentials fall back to the SMARTLING_API_KEY / SMARTLING_PROJECT_ID
    environment variables when absent from the config.

    :param conf: parsed smarterling configuration
    :returns: a configured SmartlingFileApi instance
    :raises SmarterlingError: when api-key or project-id cannot be resolved
    """
    api_key = conf.config.get('api-key', os.environ.get('SMARTLING_API_KEY'))
    project_id = conf.config.get('project-id', os.environ.get('SMARTLING_PROJECT_ID'))
    if not project_id or not api_key:
        raise SmarterlingError('config.api-key and config.project-id are required configuration items')
    proxy_settings = None
    # `in` replaces the long-deprecated dict.has_key and behaves identically.
    if 'proxy-settings' in conf.config:
        proxy_settings = ProxySettings(
            conf.config.get('proxy-settings').get('username', ''),
            conf.config.get('proxy-settings').get('password', ''),
            conf.config.get('proxy-settings').get('host', ''),
            int(conf.config.get('proxy-settings').get('port', '80')))
    return SmartlingFileApiFactory().getSmartlingTranslationApi(
        not conf.config.get('sandbox', False),
        api_key,
        project_id,
        proxySettings=proxy_settings)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_config(file_name='smarterling.config'):
    """Parse a smarterling configuration file.

    Environment variables referenced in the file (``$VAR``/``${VAR}``)
    are expanded before the YAML is parsed.

    :param file_name: path of the configuration file
    :returns: the configuration as an AttributeDict
    :raises SmarterlingError: when the file is missing or cannot be parsed
    """
    if not os.path.exists(file_name) or not os.path.isfile(file_name):
        raise SmarterlingError('Config file not found: %s' % file_name)
    try:
        contents = read_from_file(file_name)
        contents_with_environment_variables_expanded = os.path.expandvars(contents)
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary Python objects; consider yaml.safe_load if config files
        # may come from untrusted sources.
        return AttributeDict(yaml.load(contents_with_environment_variables_expanded))
    except Exception as e:
        raise SmarterlingError("Error parsing config file: %s" % str(e))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, key, default_val=None, require_value=False):
    """Look up *key*, wrapping dict results so attribute access keeps working.

    :param key: the key to look up
    :param default_val: value returned when the key is absent
    :param require_value: when True, a missing/None result raises KeyError
    :returns: the stored value, or an AttributeDict when the value is a dict
    :raises KeyError: when require_value is set and no value was found
    """
    # Call dict.get explicitly: self.get would recurse into this override.
    result = dict.get(self, key, default_val)
    if require_value and result is None:
        raise KeyError('key "%s" not found' % key)
    return AttributeDict(result) if isinstance(result, dict) else result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect_widget(self, wid, getter=None, setter=None, signal=None, arg=None, update=True, flavour=None):
    """Finish set-up by connecting the widget.

    The model was already specified in the constructor.

    *wid* is a widget instance.

    *getter* is a callable. It is passed *wid* and must return its current
    value.

    *setter* is a callable. It is passed *wid* and the current value of the
    model property and must update the widget.

    *signal* is a string naming the signal to connect to on *wid*. When it
    is emitted we update the model.

    *getter*, *setter* and *signal* are optional. Missing values are
    guessed from *wid* using
    :meth:`gtkmvc3.adapters.default.search_adapter_info`. If nothing is
    found this raises :exc:`TypeError`.

    *arg* is an optional value passed to the handler for *signal*. This
    doesn't do anything unless a subclass overrides the handler.

    *update* denotes whether to update the widget from the model
    immediately. Otherwise the widget stays unchanged until the first
    notification.

    *flavour* can be used to select special behaviours about the adaptation
    when twice or more possibilities are possibly handled for the same
    widget type. See adapters.default for further information.
    """
    if wid in self._wid_info:
        raise ValueError("Widget " + str(wid) + " was already connected")
    wid_type = None
    # Fill in whatever the caller did not supply from the adapter registry.
    if getter is None or setter is None or signal is None:
        info = search_adapter_info(wid, flavour)
        if getter is None:
            getter = info[GETTER]
        if setter is None:
            setter = info[SETTER]
            # The widget type is only meaningful for the guessed setter.
            wid_type = info[WIDTYPE]
        if signal is None:
            signal = info[SIGNAL]
    # Remember how to read/write this widget before hooking up signals.
    self._wid_info[wid] = (getter, setter, wid_type)
    if signal:
        if arg:
            wid.connect(signal, self._on_wid_changed, arg)
        else:
            wid.connect(signal, self._on_wid_changed)
    self._wid = wid
    # Optionally push the model's current value into the widget right away.
    if update:
        self.update_widget()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _connect_model(self, model):
    """Connect this adapter to *model*'s property and register self as a
    value observer for it.

    Supports dotted property paths ("a.b.prop"): the prefix is walked
    through intermediate Model attributes and tracked by an Intermediate
    observer; the final component is the property adapted here.

    :param model: the (root) Model instance holding the property
    :raises TypeError: when an intermediate path element is not a Model
    :raises ValueError: when the final attribute is missing from the model
    """
    parts = self._prop_name.split(".")
    if len(parts) > 1:
        # identifies the model: every path element but the last names an
        # intermediate model, which is resolved below.
        models = parts[:-1]
        Intermediate(model, models, self)
        for name in models:
            model = getattr(model, name)
            if not isinstance(model, Model):
                raise TypeError("Attribute '" + name +
                                "' was expected to be a Model, but found: " +
                                str(model))
        prop = parts[-1]
    else: prop = parts[0]
    # prop is inside model?
    if not hasattr(model, prop):
        raise ValueError("Attribute '" + prop +
                         "' not found in model " + str(model))
    # is it observable?
    if model.has_property(prop):
        # we need to create an observing method before registering;
        # the method is named so the observation framework can find it.
        meth = types.MethodType(self._get_observer_fun(prop), self)
        setattr(self, meth.__name__, meth)
    self._prop = getattr(model, prop)
    self._prop_name = prop
    # registration of model:
    self._model = model
    self.observe_model(model)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_observer_fun(self, prop_name):
    """Build the value-change observer callback for *prop_name*.

    The returned function ignores notifications triggered by this
    adapter's own write-back (guarded by the _itsme flag).
    """
    def _on_value_change(self, model, old, new):
        # Skip notifications caused by our own property write.
        if not self._itsme:
            self._on_prop_changed()
    # Give the observer its canonical name so registration by name works
    # and stack traces stay readable.
    _on_value_change.__name__ = "property_%s_value_change" % prop_name
    return _on_value_change
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _write_property(self, val, *args):
    """Set the property to *val*.

    The value is transformed by the prop_write function when one was
    specified at construction time; otherwise a cast to the property's
    current type is attempted. A ValueError from the conversion is routed
    to the value_error callback when one is installed, else re-raised.
    """
    val_wid = val
    # 'finally' would be better here, but not supported in 2.4 :(
    try:
        # Cast to the current property type unless a prop_write function
        # takes full responsibility for the conversion.
        totype = type(self._get_property(*args))
        if (totype is not type(None) and
            (self._prop_cast or not self._prop_write)):
            val = self._cast_value(val, totype)
        if self._prop_write:
            val = self._prop_write(val)
        # _itsme suppresses reacting to our own change notification.
        self._itsme = True
        self._set_property(val, *args)
    except ValueError:
        self._itsme = False
        # Report the original widget value, not the half-converted one.
        if self._value_error:
            self._value_error(self, self._prop_name, val_wid)
        else:
            raise
    except:
        # Any other error: clear the flag before propagating.
        self._itsme = False
        raise
    self._itsme = False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_widget(self):
    """Return the widget's current value via the registered getter.

    May raise InvalidValue (from the getter) when the widget content
    must not be considered valid.
    """
    getter, _setter, _wid_type = self._wid_info[self._wid]
    return getter(self._wid)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _write_widget(self, val):
    """Push *val* into the widget through the registered setter.

    The _itsme flag is raised while writing so the resulting widget
    signal does not bounce back into the model.
    """
    self._itsme = True
    try:
        _getter, setter, wid_type = self._wid_info[self._wid]
        if setter:
            # Cast to the widget's declared type when one was recorded.
            value = val if wid_type is None else self._cast_value(val, wid_type)
            setter(self._wid, value)
    finally:
        self._itsme = False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _on_prop_changed(self, instance, meth_name, res, args, kwargs):
    """Observation callback fired when a modifying method is called.

    The extra observation arguments (instance, method name, result, args,
    kwargs) are ignored; we simply delegate to the base class, which
    refreshes the widget from the property's current value.
    """
    Adapter._on_prop_changed(self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_property(self, *args):
    """Return the value currently stored in the property.

    The raw value is passed through the user getter and then through the
    optional prop_read transformation.
    """
    raw = self._getter(Adapter._get_property(self), *args)
    return self._prop_read(raw, *args) if self._prop_read else raw
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def distort(value):
    """Distort a string by randomly tweaking its case and punctuation.

    :param value: a string to distort.
    :return: a distorted string.
    """
    result = value.lower()
    # 1-in-5 chance: capitalize the first letter.
    if RandomBoolean.chance(1, 5):
        result = result[:1].upper() + result[1:]
    # 1-in-3 chance: append a random symbol.
    if RandomBoolean.chance(1, 3):
        result += random.choice(_symbols)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pwm_start(self, channel, duty_cycle=None, frequency=None):
    """
    Starts the pwm signal on a channel. The channel should be defined
    as pwm prior to this call.

    :param channel: The channel to start the pwm signal on.
    :type channel: ``int``
    :param duty_cycle: The duty cycle (0-100) to use on the channel.
    :type duty_cycle: ``int``
    :param frequency: The frequency to be used on the pwm channel; when
        given, the controller frequency is updated first.
    :type frequency: ``int``
    """
    if frequency:
        self.set_pwm_freq(frequency)
    # NOTE(review): the docstring promises that a previously configured
    # duty cycle is reused when none is passed, but no previous value is
    # stored here, so duty_cycle=None raises TypeError -- confirm intent.
    # Multiply before dividing so the result is correct on Python 2 as
    # well (the old duty_cycle/100 truncated any value < 100 to 0).
    self.set_pwm(channel, 0, int(4096 * duty_cycle / 100))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_pwm_freq(self, freq_hz):
    """Set the PWM frequency to the provided value in hertz."""
    # prescale = round(25MHz / (4096 * freq)) - 1, per the controller's
    # 25 MHz oscillator and 12-bit counter.
    estimate = 25000000.0 / 4096.0 / float(freq_hz) - 1.0
    logger.debug('Setting PWM frequency to {0} Hz'.format(freq_hz))
    logger.debug('Estimated pre-scale: {0}'.format(estimate))
    prescale = int(math.floor(estimate + 0.5))
    logger.debug('Final pre-scale: {0}'.format(prescale))
    mode1_before = self.i2c.read_U8(MODE1)
    sleep_mode = (mode1_before & 0x7F) | 0x10
    # The prescale register can only be written while the chip sleeps.
    self.i2c.write8(MODE1, sleep_mode)
    self.i2c.write8(PRESCALE, prescale)
    self.i2c.write8(MODE1, mode1_before)
    # Give the oscillator time to stabilise before restarting channels.
    time.sleep(0.005)
    self.i2c.write8(MODE1, mode1_before | 0x80)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_pwm(self, channel, on, off):
    """Sets a single PWM channel."""
    offset = 4 * channel
    # Each channel has four registers: low/high bytes of the on and off counts.
    writes = (
        (LED0_ON_L + offset, on & 0xFF),
        (LED0_ON_H + offset, on >> 8),
        (LED0_OFF_L + offset, off & 0xFF),
        (LED0_OFF_H + offset, off >> 8),
    )
    for register, value in writes:
        self.i2c.write8(register, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_all_pwm(self, on, off):
    """Sets all PWM channels."""
    # Write low/high bytes of the on and off counts to the ALL_LED registers.
    for register, value in (
            (ALL_LED_ON_L, on & 0xFF),
            (ALL_LED_ON_H, on >> 8),
            (ALL_LED_OFF_L, off & 0xFF),
            (ALL_LED_OFF_H, off >> 8)):
        self.i2c.write8(register, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def collect_ansible_classes():
    """Run playbook and collect classes of ansible that are run.

    Installs a trace function for the duration of the playbook run
    (``main()``); every traced call whose ``self`` belongs to an ansible
    class is recorded in the module-level ANSIBLE_CLASSES dict.
    Python 2 only (uses the print statement).
    """
    def trace_calls(frame, event, arg): # pylint: disable=W0613
        """Trace function calls to collect ansible classes.
        Trace functions and check if they have self as an arg. If so, get their class if the
        class belongs to ansible.
        """
        if event != 'call':
            return
        try:
            _locals = inspect.getargvalues(frame).locals
            if 'self' not in _locals:
                return
            _class = _locals['self'].__class__
            _class_repr = repr(_class)
            # Crude but effective filter: ansible classes mention
            # 'ansible' in their repr (module path).
            if 'ansible' not in _class_repr:
                return
            # Dict used as an ordered set of discovered classes.
            ANSIBLE_CLASSES[_class] = True
        except (AttributeError, TypeError):
            # Frames without usable locals are simply skipped.
            pass
    print "Gathering classes"
    sys.settrace(trace_calls)
    main()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_args():
    """Parse args and separate generator and playbook args.

    Arguments before a literal ``--`` are consumed by the generator;
    everything after it is left in sys.argv for ansible-playbook.
    """
    class HelpOnErrorArgParser(argparse.ArgumentParser):
        """Print help message as well when an error is raised."""
        def error(self, message):
            sys.stderr.write("Error: %s\n" % message)
            self.print_help()
            sys.exit(2)
    def validate(_file):
        """Validate if the given target argument is a valid file or a valid directory"""
        _file = abspath(_file)
        if not exists(_file):
            if not exists(dirname(_file)):
                # Argparse uses the ArgumentTypeError to give a rejection message like:
                # error: argument input: x does not exist
                raise argparse.ArgumentTypeError("{0} does not exist".format(_file))
        return _file
    def expand_cg_args(cg_args):
        """Separate clubbed flags in command line args for the generator.py
        Allows flags to be clubbed like, -ilt example.txt.
        """
        expanded = list()
        for item in cg_args:
            if len(item) < 3:
                # If at all a flag, must be single flag
                expanded.append(item)
                continue
            if item.startswith("-"):
                if not item.startswith("--"):
                    # Split clubbed short flags, e.g. "-il" -> "-i", "-l".
                    for flag in item[1:]:
                        expanded.append('-' + flag)
                    continue
            expanded.append(item)
        return expanded
    class AssignDefaultIgnore(argparse.Action):
        """If argument is specified but nothing provided, use pre-defined.
        nargs="*" doesn't allow const and default kwarg can't be used as we might not want to
        ignore as well.
        """
        def __call__(self, parser, args, values, option_string=None):
            if values is not None and not len(values):
                values = IGNORE_METHODS
            setattr(args, self.dest, values)
    # Split sys.argv at '--': generator args before it, playbook args after.
    try:
        indx = sys.argv.index('--')
        cg_args, sys.argv = sys.argv[1:indx], sys.argv[:1] + sys.argv[indx + 1:]
    except ValueError:
        cg_args = []
    cg_args = expand_cg_args(cg_args) # allow -il type of usage
    parser = HelpOnErrorArgParser(description=DESCRIPTION)
    parser.add_argument(
        "-l", "--long", action='store_true', default=False,
        help="File reference of method in call graph is absolute, i.e. starts with ansible, "
        "otherwise just the basename if not __init__.py")
    parser.add_argument(
        "-t", "--target", nargs="?", type=validate, const=TARGET_FILE, default=TARGET_FILE,
        help="Filepath to write call graph, defaults to %(default)s")
    parser.add_argument(
        "-i", "--ignore", nargs='*', action=AssignDefaultIgnore,
        help="Methods to ignore while generating call graph")
    # TODO: Allow classes that can be intercepted
    parser.usage = \
        parser.format_usage()[len("usage: "):].rstrip() + " -- <ansible-playbook options>\n"
    cg_args = parser.parse_args(cg_args)
    # No playbook arguments left for ansible: show help and bail out.
    if not len(sys.argv[1:]):
        parser.print_help()
        sys.exit(2)
    return cg_args
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def detect_process(cls, headers):
    """Returns tuple of process, legacy or None, None if not process originating."""
    try:
        if 'Libprocess-From' in headers:
            # Modern wire protocol: PID carried in a dedicated header.
            return PID.from_string(headers['Libprocess-From']), False
        user_agent = headers.get('User-Agent', '')
        if user_agent.startswith('libprocess/'):
            # Legacy protocol: PID embedded in the User-Agent string.
            return PID.from_string(user_agent[len('libprocess/'):]), True
    except ValueError as e:
        log.error('Failed to detect process: %r' % e)
    return None, None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mount_process(self, process):
    """ Mount a Process onto the http server to receive message callbacks. """
    pid_id = process.pid.id
    # Plain routed handlers, one per declared route path.
    for route_path in process.route_paths:
        route = '/%s%s' % (pid_id, route_path)
        log.info('Mounting route %s' % route)
        spec = (re.escape(route), RoutedRequestHandler,
                dict(process=process, path=route_path))
        self.app.add_handlers('.*$', [spec])
    # Wire-protocol message handlers, one per declared message name.
    for message_name in process.message_names:
        route = '/%s/%s' % (pid_id, message_name)
        log.info('Mounting message handler %s' % route)
        spec = (re.escape(route), WireProtocolMessageHandler,
                dict(process=process, name=message_name))
        self.app.add_handlers('.*$', [spec])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unmount_process(self, process):
    """ Unmount a process from the http server to stop receiving message callbacks. """
    # Tornado has no remove_handlers, but app.handlers is public: a list of
    # (host_pattern, [handler spec, ...]) tuples. Keep every spec whose
    # kwargs do not reference this process.
    def keep(spec):
        kwargs = spec.kwargs
        return 'process' not in kwargs or kwargs['process'] != process
    self.app.handlers = [
        (host_pattern, [spec for spec in specs if keep(spec)])
        for host_pattern, specs in self.app.handlers
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def typevalue(self, key, value):
    """Given a parameter identified by ``key`` and an untyped string,
    convert that string to the type that our version of key has.
    """
    def listconvert(raw):
        # This function might be called with both string representations
        # of entire lists and simple (unquoted) strings. List strings come
        # in two flavours: the (legacy/deprecated) python literal
        # (eg "['foo', 'bar']") and the simple comma-separated form
        # (eg "foo, bar"). Try the literal first; on failure fall back to
        # splitting on commas; otherwise return the value verbatim
        # (not wrapped in a list, for reasons).
        try:
            return ast.literal_eval(raw)
        except (SyntaxError, ValueError):
            if "," in raw:
                return [part.strip() for part in raw.split(",")]
            return raw
    default = self.get(key)  # should never fail
    # The stored default may be a type object itself or an instance of it.
    target = default if inspect.isclass(default) else type(default)
    # Pick a converter for the special-cased types; anything else is
    # converted by calling the type directly.
    if target is bool:
        convert = LayeredConfig.boolconvert
    elif target is list:
        convert = listconvert
    elif target is date:
        convert = LayeredConfig.dateconvert
    elif target is datetime:
        convert = LayeredConfig.datetimeconvert
    else:
        convert = target
    return convert(value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_valid_keys():
    """ create a list of valid keys """
    # One character per code point in each inclusive (minimum, maximum) range.
    return [chr(code)
            for minimum, maximum in RANGES
            for code in range(ord(minimum), ord(maximum) + 1)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_configuration_file():
    """ return jenks configuration file """
    # Walk up from the current directory to the filesystem root, looking
    # for the config file at each level.
    directory = os.path.abspath(os.curdir)
    while directory != os.sep:
        candidate = os.path.join(directory, CONFIG_FILE_NAME)
        if os.path.exists(candidate):
            return candidate
        directory = os.path.dirname(directory)
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_write_yaml_to_file(file_name):
    """ generate a method to write the configuration in yaml to the method desired """
    def write_yaml(config):
        # Serialize and write the config each time the closure is invoked.
        with open(file_name, 'w+') as file_handle:
            file_handle.write(yaml.dump(config))
    return write_yaml
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_grid_data(file_list, data_type="binary", sort=True, delim=" "):
    """
    Loads data from one or multiple grid_task files.

    Arguments:
    file_list - either a string or a list of strings indicating files to
           load data from. Files are assumed to be in grid_task.dat format
           (space delimited values, one per cell).
    data_type - a string representing what type of data is in the file.
           Either "binary", "int", "float", or "string".
    sort - If you're making a movie, you want the files to be in
           chronological order. By default, they will be sorted. If for some
           reason you don't want them in chronological order, set sort to
           False.
    Returns: A three-dimensional array. The first dimension is columns, the
    second is rows. At each row,column index in the array is another list
    which holds the values that each of the requested files has at that
    location in the grid. If you want this list collapsed to a single
    representative number, you should use agg_niche_grid.
    """
    # If there's only one file, we pretend it's a list
    if not type(file_list) is list:
        file_list = [file_list]
    elif sort:
        # put file_list in chronological order (numeric part of the name)
        file_list.sort(key=lambda f: int(re.sub("[^0-9]", "", f)))
    # Map each supported data_type to its converter; validating up front
    # avoids re-checking the string for every cell.
    converters = {"binary": lambda s: bin(int(s)),
                  "float": float,
                  "int": int,
                  "string": str}
    convert = converters.get(data_type)
    if convert is None:
        print("Unsupported data_type passed to load_grid")
        return
    world_size = get_world_dimensions(file_list[0], delim)
    # Initialize empty data array
    data = initialize_grid(world_size, [])
    # Loop through file list, reading in data
    for f in file_list:
        # with-statement guarantees the handle is closed even on errors
        # (the old code leaked the handle if a line was malformed).
        with open(f) as infile:
            lines = infile.readlines()
        for i in range(world_size[1]):
            row = lines[i].strip().split(delim)
            for j in range(world_size[0]):
                data[i][j].append(convert(row[j]))
    return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_niche_grid(res_dict, world_size=(60, 60)):
    """
    Converts dictionary specifying where resources are to nested lists
    specifying what sets of resources are where.

    res_dict - a dictionary in which keys are resources in the environment
           and values are list of tuples representing the cells they're in.
    world_size - a tuple indicating the dimensions of the world.
           Default = 60x60, because that's the default Avida world size

    Returns a list of lists of sets indicating the set of resources
    available at each x,y location in the Avida grid.
    """
    world = initialize_grid(world_size, set())
    # Record each resource in every cell it occupies; cells are (x, y)
    # tuples, while the grid is indexed [row][column] == [y][x].
    for resource, cells in res_dict.items():
        for x, y in cells:
            world[y][x].add(resource)
    return world
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_environment_file_list(names, world_size=(60, 60)):
    """
    Extract information about spatial resources from all environment files
    in a list.

    Arguments:
    names - a list of strings representing the paths to the environment
           files (a single string is also accepted and treated as a
           one-element list).
    world_size - a tuple representing the x and y coordinates of the world.
           (default: 60x60)

    Returns a list with one parsed environment (as returned by
    parse_environment_file) per input file, each indicating the set of
    resources available at every x,y location in the Avida grid.
    """
    # A bare string (or any non-list) is wrapped in a list. The previous
    # implementation detected this by attempting item assignment inside a
    # bare except, which also masked unrelated errors.
    if not isinstance(names, list):
        names = [names]
    return [parse_environment_file(name, world_size) for name in names]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_environment_file(filename, world_size=(60, 60)):
    """
    Extract information about spatial resources from an environment file.

    Arguments:
    filename - a string representing the path to the environment file.
    world_size - a tuple representing the x and y coordinates of the world.
           (default: 60x60)

    Returns an EnvironmentFile wrapping a list of lists of sets that
    indicate the set of resources available at each x,y location in the
    Avida grid.
    """
    # with-statement guarantees the handle is closed even on errors.
    with open(filename) as infile:
        lines = infile.readlines()
    tasks = []
    # Find all spatial resources and record which cells they're in
    res_order = []
    res_dict = {}
    for line in lines:
        if line.startswith("GRADIENT_RESOURCE"):
            name, cells = parse_gradient(line, world_size)
        elif line.startswith("CELL"):
            name, cells = parse_cell(line, world_size)
        elif line.startswith("REACTION"):
            task = parse_reaction(line)
            if task not in tasks:
                tasks.append(task)
            # REACTION lines carry no cell data; without this continue the
            # previous resource's cells were counted again (or an undefined
            # name was referenced when a REACTION appeared first).
            continue
        else:
            continue
        dict_increment(res_dict, name, cells)
        if name not in res_order:
            res_order.append(name)
    # Create a map of niches across the environment and return it
    grid = make_niche_grid(res_dict, world_size)
    return EnvironmentFile(grid, res_order, world_size, filename, tasks)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_category_labels(level_name, cat_name, dataframe_needing_cat):
    '''A function that adds a category name column to a pandas dataframe

    :param level_name: the constant label to put in every row of the new column
    :param cat_name: the name of the new category column
    :param dataframe_needing_cat: a pandas dataframe to prepend the category column to
    :returns: a pandas dataframe with the category column first, followed by
        the original columns
    '''
    row_count = dataframe_needing_cat.shape[0]
    # Build a one-column frame repeating the label, then name it.
    label_column = pd.DataFrame([level_name] * row_count)
    label_column.columns = [cat_name]
    # Prepend the label column to the original frame.
    return pd.concat([label_column, dataframe_needing_cat], axis=1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def chained_set(self, value, command='set', *keys):
    """
    chained_set takes the value to enter into the dictionary, a command
    of what to do with the value, and a sequence of keys.

    Commands:
      'set'           - overwrite the value at the key path
      'append'        - append to the list at the key path (creating it)
      'set_or_append' - set on first write; grow a list on later writes
      'insert'        - append only if the value is not already present

    Examples:
    d = {}
    d.chained_set(1,'append','level 1','level 2') -> d['level 1']['level 2'] = [1]
    d.chained_set(2,'append','level 1','level 2') -> d['level 1']['level 2'] = [1,2]
    """
    existing = self
    # Walk/create the intermediate levels. A fresh container is created
    # per missing level: the old code reused a single instance for every
    # missing level, producing self-referential nesting for paths more
    # than two keys deep.
    for key in keys[:-1]:
        if key not in existing:
            existing[key] = self.__class__()
        existing = existing[key]
    last = keys[-1]
    if command == 'set':
        existing[last] = value
    elif command == 'append':
        if last in existing:
            existing[last].append(value)
        else:
            existing[last] = [value]
    elif command == 'set_or_append':
        if last in existing:
            # Grow an existing list in place, otherwise pair the old
            # scalar with the new value. (The old code inspected the type
            # of the *key* rather than of the stored value, so lists were
            # never extended in place.)
            if isinstance(existing[last], list):
                existing[last].append(value)
            else:
                existing[last] = [existing[last], value]
        else:
            existing[last] = value
    elif command == 'insert':
        if last in existing:
            if value not in existing[last]:
                existing[last].append(value)
        else:
            existing[last] = [value]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_renamed_input_fields(self, renamed_input_fields):
    """This method expects a scalar string or a list of input_fields to """
    # A single isinstance call with a tuple covers both accepted types
    # (basestring/ListType: this module targets Python 2).
    if not isinstance(renamed_input_fields, (basestring, ListType)):
        raise ValueError("renamed_input_fields must be a string or a list")
    self.renamed_input_fields = renamed_input_fields
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def list_dir_abspath(path):
    """Return a list of absolute-style paths for the entries of *path*.

    Each entry of os.listdir(path) is joined onto *path*.

    :param path: directory to list (see os.listdir)
    :returns: list of path strings, one per directory entry
    """
    # A list comprehension returns a real (reusable) sequence instead of
    # the one-shot iterator that map() yields on Python 3.
    return [os.path.join(path, entry) for entry in os.listdir(path)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_digest(self):
    """
    return int uuid number for digest

    :rtype: int
    :return: digest
    """
    # The 16-byte digest is two big-endian unsigned 64-bit halves.
    high, low = struct.unpack('>QQ', self.digest)
    return (high << 64) + low
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_blob_hash(self, h=hashlib.md5):
    """
    get hash instance of blob content

    :param h: callable hash constructor (defaults to hashlib.md5)
    :type h: builtin_function_or_method
    :rtype: _hashlib.HASH
    :return: hash instance
    :raises TypeError: when h is not callable
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would turn a bad argument into an obscure downstream error.
    if not callable(h):
        raise TypeError("h must be a callable hash constructor")
    return h(self.get_blob_data())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_blob_data(self, tag_target='asset', force=False):
    """Get asset version content using pg large object streams.

    The assembled content is cached on the instance as ``_blob_data``.

    :param tag_target: content tag to filter on (``'asset'`` by default)
    :param bool force: False by default; forces re-reading the content
        from the database instead of returning the cached value
    :rtype: str
    :return: content in raw (bytes) format
    """
    # Return the cached copy unless the caller forces a re-read.
    if hasattr(self, '_blob_data') and not force:
        return self._blob_data
    # Start from an empty byte string on both Python 2 and 3.
    if six.PY2:
        self._blob_data = six.binary_type('')
    elif six.PY3:
        self._blob_data = six.binary_type('', encoding='ascii')
    # NOTE(review): ``contents`` / ``stream.get_blobs()`` are ORM relations
    # defined elsewhere; assumed to yield blob rows exposing a ``data``
    # buffer -- confirm against the model definitions.
    asset_contents = self.contents.filter(tag=tag_target)
    for asset_content in asset_contents:
        blobs = asset_content.stream.get_blobs()
        for blob in blobs:
            # Quadratic bytes concatenation; acceptable for small blobs.
            self._blob_data += six.binary_type(blob.data)
    return self._blob_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self, obj):
    """Execute the actions on the given object.

    The action is skipped when its dependency conditions are not met;
    otherwise ``actionfunc`` is invoked and its result recorded as the
    new status. Exceptions never propagate; they are captured as an
    ERROR status instead.

    :param obj: The object that the action should process
    :type obj: :class:`object`
    :returns: None
    :rtype: None
    :raises: None
    """
    # Skip if any success-dependency did not succeed.
    for d in self.depsuccess:
        if d.status.value != ActionStatus.SUCCESS:
            self.status = ActionStatus(ActionStatus.SKIPPED, "Skipped because action \"%s\" did not succeed." % d.name)
            return
    # Skip if any fail-dependency unexpectedly succeeded.
    for d in self.depfail:
        if d.status.value == ActionStatus.SUCCESS:
            self.status = ActionStatus(ActionStatus.SKIPPED, "Skipped because action \"%s\" did not fail." % d.name)
            return
    try:
        self.status = self.actionfunc(obj)
        if not isinstance(self.status, ActionStatus):
            raise TypeError("Expected action function %s to return a ActionStatus" % self.actionfunc)
    except:
        # Bare except is deliberate here: any exception (including the
        # TypeError raised just above) is recorded as an ERROR status with
        # its traceback instead of propagating to the caller.
        self.status = ActionStatus(ActionStatus.ERROR, "Unexpected Error.", traceback.format_exc())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def status(self, ):
    """The global status that summarizes all actions.

    The status is calculated in the following order:
    if any error occurred, the status will be :data:`ActionStatus.ERROR`;
    if any failure occurred, the status will be :data:`ActionStatus.FAILURE`;
    if all actions were successful or skipped, the status will be
    :data:`ActionStatus.SUCCESS`.

    :returns: a status object that represents a summary of all actions
    :rtype: :class:`ActionStatus`
    :raises: None
    """
    status = ActionStatus(ActionStatus.SUCCESS, "All actions succeeded.")
    for a in self.actions:
        if a.status.value == ActionStatus.ERROR:
            # An error dominates everything else; keep its traceback and stop.
            status = ActionStatus(ActionStatus.ERROR, "Error: action \"%s\" raised an error!" % a.name, a.status.traceback)
            break
        if a.status.value == ActionStatus.FAILURE:
            # Remember the failure but keep scanning in case an error follows.
            status = ActionStatus(ActionStatus.FAILURE, "Action(s) failed!")
    return status
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def format_explanation(explanation, original_msg=None):
    """This formats an explanation.

    Normally all embedded newlines are escaped, however there are three
    exceptions: \\n{, \\n} and \\n~. The first two are intended to cover
    nested explanations (see function and attribute explanations,
    .visit_Call() / visit_Attribute()). The last one is for when one
    explanation needs to span multiple lines, e.g. when displaying diffs.

    :param explanation: the raw explanation text with embedded markers.
    :param original_msg: optional user-supplied assertion message used as
        a fallback when introspection is disabled.
    :returns: the formatted, joined explanation string.
    """
    # If message introspection is disabled, fall back to the original
    # assertion message when one was supplied.
    if not conf.is_message_introspection_enabled() and original_msg:
        return original_msg
    # NOTE(review): ``ecu`` is a project helper -- presumably coerces the
    # explanation to unicode; confirm against its definition.
    explanation = ecu(explanation)
    lines = _split_explanation(explanation)
    result = _format_lines(lines)
    return u('\n').join(result)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _split_explanation(explanation):
"""Return a list of individual lines in the explanation This will return a list of lines split on '\n{', '\n}' and '\n~'. Any other newlines will be escaped and appear in the line as the literal '\n' characters. """ |
raw_lines = (explanation or u('')).split('\n')
lines = [raw_lines[0]]
for l in raw_lines[1:]:
if l and l[0] in ['{', '}', '~', '>']:
lines.append(l)
else:
lines[-1] += '\\n' + l
return lines |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_lines(lines):
    """Format the individual lines.

    This will replace the '{', '}' and '~' markers of our mini formatting
    language with 'where ...' / 'and ...' connectors and indentation,
    tracked via a stack of nesting levels.

    Return a list of formatted lines.
    """
    result = lines[:1]
    # stack holds result-indices of the line opening each nesting level;
    # stackcnt counts sub-explanations already emitted per level.
    stack = [0]
    stackcnt = [0]
    for line in lines[1:]:
        if line.startswith('{'):
            # Open a nested explanation: the first sibling at a level
            # reads "where", later siblings read "and".
            if stackcnt[-1]:
                s = u('and ')
            else:
                s = u('where ')
            stack.append(len(result))
            stackcnt[-1] += 1
            stackcnt.append(0)
            result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:])
        elif line.startswith('}'):
            # Close the nesting level and append the remainder to the line
            # that opened it.
            stack.pop()
            stackcnt.pop()
            result[stack[-1]] += line[1:]
        else:
            assert line[0] in ['~', '>']
            # NOTE(review): bumping stack[-1] here shifts which result line
            # a later '}' appends to -- confirm this matches the upstream
            # pytest formatter's intent.
            stack[-1] += 1
            indent = len(stack) if line.startswith('~') else len(stack) - 1
            result.append(u(' ')*indent + line[1:])
    assert len(stack) == 1
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _diff_text(left, right, verbose=False):
    """Return the explanation for the diff between text or bytes.

    Unless --verbose is used this will skip leading and trailing
    characters which are identical to keep the diff minimal.

    If the input are bytes they will be safely converted to text.
    """
    from difflib import ndiff
    explanation = []
    # Bytes are shown via their repr so undecodable content stays
    # printable; escaped newlines are turned back into real ones so the
    # line-based diff below works.
    if isinstance(left, py.builtin.bytes):
        left = u(repr(left)[1:-1]).replace(r'\n', '\n')
    if isinstance(right, py.builtin.bytes):
        right = u(repr(right)[1:-1]).replace(r'\n', '\n')
    if not verbose:
        i = 0  # just in case left or right has zero length
        # Find the first differing index.
        for i in range(min(len(left), len(right))):
            if left[i] != right[i]:
                break
        if i > 42:
            i -= 10  # Provide some context
            explanation = [u('Skipping %s identical leading '
                             'characters in diff, use -v to show') % i]
            left = left[i:]
            right = right[i:]
        if len(left) == len(right):
            # Trim the common suffix; only safe when lengths match.
            # NOTE(review): left[-i] with i == 0 compares left[0] -- a
            # quirk inherited from upstream; confirm before changing.
            for i in range(len(left)):
                if left[-i] != right[-i]:
                    break
            if i > 42:
                i -= 10  # Provide some context
                explanation += [u('Skipping %s identical trailing '
                                  'characters in diff, use -v to show') % i]
                left = left[:-i]
                right = right[:-i]
    keepends = True
    explanation += [line.strip('\n')
                    for line in ndiff(left.splitlines(keepends),
                                      right.splitlines(keepends))]
    return explanation
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def with_rule(self, rule):
    """Adds a validation rule to this schema.

    This method returns a reference to this schema to implement the
    Builder pattern and chain additional calls.

    :param rule: a validation rule to be added.
    :return: this validation schema.
    """
    # Lazily create the rules list on first use; "is None" replaces the
    # non-idiomatic "!= None" comparison.
    if self.rules is None:
        self.rules = []
    self.rules.append(rule)
    return self
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _make_socket(cls, ip, port):
    """Bind to a new socket.

    If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment,
    these will be used for socket connectivity.

    :param ip: address to bind to; may be empty or a wildcard address.
    :param port: port to bind to; 0 lets the OS pick an ephemeral port.
    :returns: tuple of (bound socket, resolved ip, actual port).
    """
    # bind_sockets may return several sockets (e.g. IPv4 + IPv6); the
    # first is used.
    bound_socket = bind_sockets(port, address=ip)[0]
    # Read back the address/port the OS actually assigned.
    ip, port = bound_socket.getsockname()
    if not ip or ip == '0.0.0.0':
        # Wildcard bind: advertise the host's primary address instead.
        ip = socket.gethostbyname(socket.gethostname())
    return bound_socket, ip, port
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def stop(self):
    """Stops the context.

    This terminates all PIDs, closes all connections, and finally stops
    the event loop.
    """
    log.info('Stopping %s' % self)
    # Copy the pid list: terminate() mutates self._processes while we
    # iterate.
    pids = list(self._processes)
    # Clean up the context
    for pid in pids:
        self.terminate(pid)
    # Drain and close every remaining open connection.
    while self._connections:
        pid = next(iter(self._connections))
        conn = self._connections.pop(pid, None)
        if conn:
            conn.close()
    self.__loop.stop()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def spawn(self, process):
    """Spawn a process.

    Spawning a process binds it to this context and assigns the process a
    pid which is returned. The process' ``initialize`` method is called.

    Note: A process cannot send messages until it is bound to a context.

    :param process: The process to bind to this context.
    :type process: :class:`Process`
    :return: The pid of the process.
    :rtype: :class:`PID`
    """
    self._assert_started()
    process.bind(self)
    # Expose the process over HTTP before registering it as routable.
    self.http.mount_process(process)
    self._processes[process.pid] = process
    process.initialize()
    return process.pid
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dispatch(self, pid, method, *args):
    """Call a method on another process by its pid.

    The method on the other process does not need to be installed with
    ``Process.install``. The call is serialized with all other calls on
    the context's event loop. The pid must be bound to this context.

    This function returns immediately.

    :param pid: The pid of the process to be called.
    :type pid: :class:`PID`
    :param method: The name of the method to be called.
    :type method: ``str``
    :return: Nothing
    """
    self._assert_started()
    self._assert_local_pid(pid)
    # Resolve the bound method once, then queue it on the event loop.
    function = self._get_dispatch_method(pid, method)
    self.__loop.add_callback(function, *args)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delay(self, amount, pid, method, *args):
    """Call a method on another process after a specified delay.

    This is equivalent to ``dispatch`` except with an additional amount
    of time to wait prior to invoking the call.

    This function returns immediately.

    :param amount: The amount of time to wait in seconds before making the call.
    :type amount: ``float`` or ``int``
    :param pid: The pid of the process to be called.
    :type pid: :class:`PID`
    :param method: The name of the method to be called.
    :type method: ``str``
    :return: Nothing
    """
    self._assert_started()
    self._assert_local_pid(pid)
    function = self._get_dispatch_method(pid, method)
    # Schedule relative to the loop's clock, not wall time.
    self.__loop.add_timeout(self.__loop.time() + amount, function, *args)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _maybe_connect(self, to_pid, callback=None):
    """Asynchronously establish a connection to the remote pid.

    If a connection already exists, the callback is invoked with it right
    away; if an attempt is already in flight, the callback is queued;
    otherwise a new socket connection is initiated.

    :param to_pid: pid of the remote endpoint to connect to.
    :param callback: optional single-argument callable receiving the
        connected stream.
    """
    # Normalize to a no-op callback and preserve the caller's stack context.
    callback = stack_context.wrap(callback or (lambda stream: None))

    def streaming_callback(data):
        # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do.
        log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid))
        log.debug('  data: %r' % (data,))

    def on_connect(exit_cb, stream):
        log.info('Connection to %s established' % to_pid)
        # Register the live stream under the lock, then flush queued callbacks.
        with self._connection_callbacks_lock:
            self._connections[to_pid] = stream
        self.__dispatch_on_connect_callbacks(to_pid, stream)
        # Keep reading until the peer closes; exit_cb fires on disconnect.
        self.__loop.add_callback(
            stream.read_until_close,
            exit_cb,
            streaming_callback=streaming_callback)

    create = False
    # Decide under the lock whether we reuse, queue, or create a connection.
    with self._connection_callbacks_lock:
        stream = self._connections.get(to_pid)
        callbacks = self._connection_callbacks.get(to_pid)
        if not stream:
            # Queue the callback; only the first caller creates the socket.
            self._connection_callbacks[to_pid].append(callback)
            if not callbacks:
                create = True

    if stream:
        # Already connected: invoke the callback on the loop and return.
        self.__loop.add_callback(callback, stream)
        return

    if not create:
        # Another caller is already connecting; our callback is queued.
        return

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    if not sock:
        raise self.SocketError('Failed opening socket')

    stream = IOStream(sock, io_loop=self.__loop)
    stream.set_nodelay(True)
    stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream'))

    connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream)

    log.info('Establishing connection to %s' % to_pid)

    stream.connect((to_pid.ip, to_pid.port), callback=connect_callback)

    if stream.closed():
        raise self.SocketError('Failed to initiate stream connection')

    log.info('Maybe connected to %s' % to_pid)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send(self, from_pid, to_pid, method, body=None):
    """Send a message method from one pid to another with an optional body.

    Note: It is more idiomatic to send directly from a bound process
    rather than calling send on the context.

    If the destination pid is on the same context, the Context may skip
    the wire and route directly to the process itself. ``from_pid`` must
    be bound to this context.

    This method returns immediately.

    :param from_pid: The pid of the sending process.
    :type from_pid: :class:`PID`
    :param to_pid: The pid of the destination process.
    :type to_pid: :class:`PID`
    :param method: The method name of the destination process.
    :type method: ``str``
    :keyword body: Optional content to send along with the message.
    :type body: ``bytes`` or None
    :return: Nothing
    """
    self._assert_started()
    self._assert_local_pid(from_pid)

    if self._is_local(to_pid):
        # Fast path: deliver directly on the event loop, skipping the wire.
        local_method = self._get_local_mailbox(to_pid, method)
        if local_method:
            log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method))
            self.__loop.add_callback(local_method, from_pid, body or b'')
            return
        else:
            # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're
            # just going to do a POST and have it dropped on the floor.
            pass

    request_data = encode_request(from_pid, to_pid, method, body=body)

    log.info('Sending POST %s => %s (payload: %d bytes)' % (
        from_pid, to_pid.as_url(method), len(request_data)))

    def on_connect(stream):
        log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid))
        stream.write(request_data)
        log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid))

    # Reuse or establish the connection, then write once connected.
    self.__loop.add_callback(self._maybe_connect, to_pid, on_connect)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def link(self, pid, to):
    """Link a local process to a possibly remote process.

    Note: It is more idiomatic to call ``link`` directly on the bound
    Process object instead.

    When ``pid`` is linked to ``to``, the termination of the ``to``
    process (or the severing of its connection from the Process ``pid``)
    will result in the local process' ``exited`` method being called
    with ``to``.

    This method returns immediately.

    :param pid: The pid of the linking process.
    :type pid: :class:`PID`
    :param to: The pid of the linked process.
    :type to: :class:`PID`
    :returns: Nothing
    """
    self._assert_started()

    def really_link():
        self._links[pid].add(to)
        log.info('Added link from %s to %s' % (pid, to))

    def on_connect(stream):
        really_link()

    # NOTE(review): this branches on whether the *linking* pid is local;
    # comparable implementations test whether ``to`` is local before
    # skipping the connection -- confirm the intended semantics.
    if self._is_local(pid):
        really_link()
    else:
        self.__loop.add_callback(self._maybe_connect, to, on_connect)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def terminate(self, pid):
    """Terminate a process bound to this context.

    When a process is terminated, all the processes to which it is linked
    will have their ``exited`` methods called. Messages to this process
    will no longer be delivered.

    This method returns immediately.

    :param pid: The pid of the process to terminate.
    :type pid: :class:`PID`
    :returns: Nothing
    """
    self._assert_started()
    log.info('Terminating %s' % pid)
    # pop() tolerates double-termination: a missing pid is a no-op mount-wise.
    process = self._processes.pop(pid, None)
    if process:
        log.info('Unmounting %s' % process)
        self.http.unmount_process(process)
    # Remove link bookkeeping for this pid (private helper defined elsewhere).
    self.__erase_link(pid)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _page_gen(self):
""" Generates The String for pages """ |
track = ""
for page in self.__pages__:
track += "/{page}".format(page=page)
return track |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _query_gen(self):
    """Generates the query string for this URL.

    NOTE(review): this calls a ``urlencode`` that accepts ``safe`` and
    ``querydelimiter`` keywords, which the stdlib ``urllib`` version does
    not -- presumably a project-local helper; confirm against the module
    imports.
    """
    return urlencode(self.__query__, safe=self.safe, querydelimiter=self.__querydelimiter__)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_nullable_string(value):
    """Converts value into string or returns None when value is None.

    Dates render as ISO-8601; naive datetimes get a trailing "Z"; lists
    join their (string) elements with commas; everything else goes
    through ``str``.

    :param value: the value to convert.
    :return: string value or None when value is None.
    """
    if value is None:
        return None
    # Exact type checks (not isinstance) are deliberate: datetime is a
    # subclass of date and must hit its own branch below.
    if type(value) == datetime.date:
        return value.isoformat()
    if type(value) == datetime.datetime:
        if value.tzinfo is None:
            # Naive datetimes are assumed UTC and marked with "Z".
            return value.isoformat() + "Z"
        return value.isoformat()
    if type(value) == list:
        # str.join replaces the original quadratic manual concatenation;
        # like the original, this requires string elements.
        return ",".join(value)
    return str(value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_string_with_default(value, default_value):
    """Converts value into string or returns default when value is None.

    :param value: the value to convert.
    :param default_value: the default value.
    :return: string value or default when value is null.
    """
    converted = StringConverter.to_nullable_string(value)
    if converted is not None:
        return converted
    return default_value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(itf):
    """Run postanalyze functions.

    :param itf: raw user input; falsy input aborts immediately.
    :returns: 1 on bad input or read failure; otherwise the result of the
        dispatched routine (implicitly None).
    """
    if not itf:
        return 1
    # access user input
    options = SplitInput(itf)
    # check input args
    error_check(options)
    # read input files
    try:
        molecules, ensemble_lookup = ReadFiles(options)
    except:
        # NOTE(review): bare except silently maps *any* read error to exit
        # code 1 -- consider at least logging the exception.
        return 1
    if options.compare:
        compare(molecules, ensemble_lookup, options)
    else:
        evaluate_list(molecules, ensemble_lookup, options)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def evaluate_list(molecules, ensemble_lookup, options):
    """Evaluate a list of ensembles and return statistics and ROC plots
    if appropriate.

    :param molecules: molecule data produced upstream.
    :param ensemble_lookup: mapping of ensemble file name -> ensemble data.
    :param options: parsed user options (``write_roc``, ``plot``, ...).
    """
    # create stats dictionaries to store results from each ensemble
    stats = {}  # {file name : metric_List}
    # print progress messages
    if options.write_roc:
        print(" Determining virtual screening performance and writing ROC data ... ")
        print('')
    else:
        print(" Determining virtual screening performance ...")
        print('')
    for filename in sorted(ensemble_lookup.keys()):
        metric_List = calculate_metrics(molecules, ensemble_lookup, filename, options)
        stats[filename] = metric_List
    # write results summary
    output.write_summary(stats, options, fw_type = None)
    # plot
    if options.plot:
        print(" Making plots ... ")
        # BUG FIX: this was a bare ``print`` expression (a no-op reference
        # to the print function on Python 3, where the rest of this file
        # uses print()); call it to emit the intended blank line.
        print('')
        plotter(molecules, ensemble_lookup, options)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _sentry_context_dict(context):
"""Create a dict with context information for Sentry.""" |
d = {
"function_name": context.function_name,
"function_version": context.function_version,
"invoked_function_arn": context.invoked_function_arn,
"memory_limit_in_mb": context.memory_limit_in_mb,
"aws_request_id": context.aws_request_id,
"log_group_name": context.log_group_name,
"cognito_identity_id": context.identity.cognito_identity_id,
"cognito_identity_pool_id": context.identity.cognito_identity_pool_id}
for k, v in os.environ.items():
if k not in {"AWS_SECURITY_TOKEN", "AWS_SESSION_TOKEN",
"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"}:
# Do not log credentials
d[k] = v
return d |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sentry_monitor(error_stream=None, **kwargs):
    """Sentry monitoring for AWS Lambda handler.

    :param error_stream: stream used to report controlled processing
        errors.
    :returns: a decorator that wraps a Lambda handler with Sentry
        error reporting.
    """
    import functools  # local import so no module-level changes are needed

    def decorator(func):
        """A decorator that adds Sentry monitoring to a Lambda handler."""
        # FIX: preserve the wrapped handler's name/docstring so framework
        # introspection and logging see the real handler, not "wrapper".
        @functools.wraps(func)
        def wrapper(event, context):
            """Wrap the target function."""
            client = _setup_sentry_client(context)
            try:
                return func(event, context)
            except (ProcessingError, OutOfOrderError) as err:
                # A controlled exception from the Kinesis processor
                _handle_processing_error(err, error_stream, client)
            except Exception:
                # Report, then re-raise to block the stream processor
                if client:
                    client.captureException()
                raise
        return wrapper
    return decorator
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.