text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def stop(self):
"""Terminates the forked process. :return: """ |
distributed_logger.info('Stopping metrics aggregator')
self.process.terminate()
self.process.join()
distributed_logger.info('Stopped metrics aggregator') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
"""Connects to the 0MQ socket and starts publishing.""" |
distributed_logger.info('Connecting registry proxy to ZMQ socket %s', self.socket_addr)
self.zmq_context = zmq.Context()
sock = self.zmq_context.socket(zmq.PUB)
sock.set_hwm(0)
sock.setsockopt(zmq.LINGER, 0)
sock.connect(self.socket_addr)
distributed_logger.info('Connected registry proxy to ZMQ socket %s', self.socket_addr)
def _reset_socket(values):
for value in values:
try:
_reset_socket(value.values())
except AttributeError:
value.socket = sock
distributed_logger.debug('Resetting socket on metrics proxies')
_reset_socket(self.stats.values())
self.socket = sock
distributed_logger.debug('Reset socket on metrics proxies') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_version(*file_paths):
"""Retrieves the version from path""" |
filename = os.path.join(os.path.dirname(__file__), *file_paths)
print("Looking for version in: {}".format(filename))
version_file = open(filename).read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def resolve_str(name_or_func, module, default):
""" Resolve and return object from dotted name """ |
assert isinstance(name_or_func, str)
resolved = resolve(name_or_func, module=module)
if isinstance(resolved, ModuleType):
if not hasattr(resolved, default):
raise ImportError("{}.{}".format(resolved.__name__, default))
resolved = getattr(resolved, default)
if not callable(resolved):
raise TypeError("{!r} is not callable"
"".format(resolved))
return resolved |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def include(_name_or_func, *args, _module=None, _default='includeme', **kwargs):
""" Resolve and call functions """ |
if callable(_name_or_func):
resolved = _name_or_func
else:
resolved = resolve_str(_name_or_func, _module, _default)
resolved(*args, **kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_requirements(self, filename):
""" Recursively find all the requirements needed storing them in req_parents, req_paths, req_linenos """ |
cwd = os.path.dirname(filename)
try:
fd = open(filename, 'r')
for i, line in enumerate(fd.readlines(), 0):
req = self.extract_requirement(line)
# if the line is not a requirement statement
if not req:
continue
req_path = req
if not os.path.isabs(req_path):
req_path = os.path.normpath(os.path.join(cwd, req_path))
if not os.path.exists(req_path):
logging.warning("Requirement '{0}' could not be resolved: '{1}' does not exist.".format(req, req_path))
if self.flags['cleanup']:
self.skip_unresolved_requirement(filename, i)
continue
# if the requirement is already added to the database, skip it
if req_path in self.req_paths:
logging.warning("Skipping duplicate requirement '{0}' at '{2}:{3}' [file '{1}'].".format(
req,
req_path,
filename,
i+1 # human-recognizable line number
))
if self.flags['cleanup']:
self.skip_unresolved_requirement(filename, i)
continue
# store requirements to the global database
self.req_parents.append(filename)
self.req_paths.append(req_path)
self.req_linenos.append(i)
# recursion
self.parse_requirements(req_path)
fd.close()
except IOError as err:
logging.warning("I/O error: {0}".format(err)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace_requirements(self, infilename, outfile_initial=None):
""" Recursively replaces the requirements in the files with the content of the requirements. Returns final temporary file opened for reading. """ |
infile = open(infilename, 'r')
# extract the requirements for this file that were not skipped from the global database
_indexes = tuple(z[0] for z in filter(lambda x: x[1] == infilename, enumerate(self.req_parents)))
req_paths = tuple(z[1] for z in filter(lambda x: x[0] in _indexes, enumerate(self.req_paths)))
req_linenos = tuple(z[1] for z in filter(lambda x: x[0] in _indexes, enumerate(self.req_linenos)))
if outfile_initial:
outfile = outfile_initial
else:
outfile = tempfile.TemporaryFile('w+')
# write the input file to the output, replacing
# the requirement statements with the requirements themselves
for i, line in enumerate(infile.readlines()):
if i in req_linenos:
req_path = req_paths[req_linenos.index(i)]
# skip unresolved requirement
if not req_path:
continue
# recursion
req_file = self.replace_requirements(req_path)
# insert something at cursor position
self.insert_requirement(outfile, req_file, req_path)
req_file.close()
else:
outfile.write(line)
infile.close()
if not outfile_initial:
outfile.seek(0)
return outfile |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fatal(self, i: int=None) -> str: """ Returns a fatal error message """ |
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def error(self, i: int=None) -> str: """ Returns an error message """ |
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def warning(self, i: int=None) -> str: """ Returns a warning message """ |
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def info(self, i: int=None) -> str: """ Returns an info message """ |
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def via(self, i: int=None) -> str: """ Returns an via message """ |
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def debug(self, i: int=None) -> str: """ Returns a debug message """ |
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def scan(self):
"""Analyze state and queue tasks.""" |
log.debug("scanning machine: %s..." % self.name)
deployed = set()
services = yield self.client.get_children(self.path + "/services")
for name in services:
log.debug("checking service: '%s'..." % name)
try:
value, metadata = yield self.client.get(
self.path + "/services/" + name + "/machines"
)
except NoNodeException:
log.warn(
"missing machines declaration for service: %s." %
name
)
machines = []
else:
machines = json.loads(value)
if machines:
log.debug("machines: %s." % ", ".join(machines))
if self.name in machines:
deployed.add(name)
else:
log.debug("service not configured for any machine.")
count = len(deployed)
log.debug("found %d service(s) configured for this machine." % count)
running = yield self.client.get_children(
self.path + "/machines/" + self.name
)
self.stopped = deployed - set(running)
if self.stopped:
log.debug("services not running: %s." % ", ".join(
map(repr, self.stopped)))
elif running:
log.debug("all services are up.") |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def initialize(name='', pool_size=10, host='localhost', password='', port=5432, user=''):
"""Initialize a new database connection and return the pool object. Saves a reference to that instance in a module-level variable, so applications with only one database can just call this function and not worry about pool objects. """ |
global pool
instance = Pool(name=name, pool_size=pool_size, host=host, password=password, port=port, user=user)
pool = instance
return instance |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def query(self, sql: str, args: tuple = None):
"""Execute a SQL query with a return value.""" |
with self._cursor() as cursor:
log.debug('Running SQL: ' + str((sql, args)))
cursor.execute(sql, args)
return cursor.fetchall() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def items(self):
"""Returns a generator of available ICachableItem in the ICachableSource """ |
for dictreader in self._csv_dictreader_list:
for entry in dictreader:
item = self.factory()
item.key = self.key()
item.attributes = entry
try:
item.validate()
except Exception as e:
logger.debug("skipping entry due to item validation exception: %s", str(e))
continue
logger.debug("found validated item in CSV source, key: %s", str(item.attributes[self.key()]))
yield item |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def first(self):
"""Returns the first ICachableItem in the ICachableSource""" |
# we need to create a new object to insure we don't corrupt the generator count
csvsource = CSVSource(self.source, self.factory, self.key())
try:
item = csvsource.items().next()
return item
except StopIteration:
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register(name=None, exprresolver=None, params=None, reg=None):
"""Register an expression resolver. Can be used such as a decorator. For example all remainding expressions are the same. .. code-block:: python @register('myresolver') def myresolver(**kwargs):
pass .. code-block:: python def myresolver(**kwargs):
pass register('myresolver', myresolver) .. code-block:: python @register('myresolver') class MyResolver():
def __call__(**kwargs):
pass .. code-block:: python class MyResolver():
def __call__(**kwargs):
pass register('myresolver', MyResolver) .. code-block:: python class MyResolver():
def __call__(**kwargs):
pass register('myresolver', MyResolver()) :param str name: register name to use. Default is the function/class name. :param exprresolver: function able to resolve an expression. :param dict kwargs: parameters of resolver instanciation if exprresolver is a class and this function is used such as a decorator. :param ResolverRegistry reg: registry to use. Default is the global registry . """ |
def _register(exprresolver, _name=name, _params=params):
"""Local registration for better use in a decoration context.
:param exprresolver: function/class able to resolve an expression.
:param str _name: private parameter used to set the name.
:param dict _params: private parameter used to set resolver class
constructor parameters."""
_exprresolver = exprresolver
if isclass(exprresolver): # try to instanciate exprresolver
if _params is None:
_params = {}
_exprresolver = _exprresolver(**_params)
if _name is None:
_name = getname(exprresolver)
reg[_name] = _exprresolver
if reg.default is None:
reg.default = _name
return exprresolver
if name is None:
name = getname(exprresolver)
if reg is None:
reg = _RESOLVER_REGISTRY
if exprresolver is None:
result = _register
else:
result = name
_register(exprresolver)
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getname(exprresolver):
"""Get expression resolver name. Expression resolver name is given by the attribute __resolver__. If not exist, then the it is given by the attribute __name__. Otherwise, given by the __class__.__name__ attribute. :raises: TypeError if exprresolver is not callable.""" |
result = None
if not callable(exprresolver):
raise TypeError('Expression resolver must be a callable object.')
result = getattr(
exprresolver, __RESOLVER__, getattr(
exprresolver, '__name__', getattr(
exprresolver.__class__, '__name__'
)
)
)
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def default(self, value):
"""Change of resolver name. :param value: new default value to use. :type value: str or callable :raises: KeyError if value is a string not already registered.""" |
if value is None:
if self:
value = list(self.keys())[0]
elif not isinstance(value, string_types):
value = register(exprresolver=value, reg=self)
elif value not in self:
raise KeyError(
'{0} not registered in {1}'.format(value, self)
)
self._default = value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wkhtmltopdf_args_mapping(data):
""" fix our names to wkhtmltopdf's args """ |
mapping = {
'cookies': 'cookie',
'custom-headers': 'custom-header',
'run-scripts': 'run-script'
}
return {mapping.get(k, k): v for k, v in data.items()} |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_name_dictionary_extractor(name_trie):
"""Method for creating default name dictionary extractor""" |
return DictionaryExtractor()\
.set_trie(name_trie)\
.set_pre_filter(VALID_TOKEN_RE.match)\
.set_pre_process(lambda x: x.lower())\
.set_metadata({'extractor': 'dig_name_dictionary_extractor'}) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_app(application):
""" Initialise an application Set up whitenoise to handle static files. """ |
config = {k: v for k, v in application.config.items() if k in SCHEMA}
kwargs = {'autorefresh': application.debug}
kwargs.update((k[11:].lower(), v) for k, v in config.items())
instance = whitenoise.WhiteNoise(application.wsgi_app, **kwargs)
instance.add_files(application.static_folder, application.static_url_path)
if not hasattr(application, 'extensions'):
application.extensions = {}
application.extensions['whitenoise'] = instance
application.wsgi_app = instance |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def env(key, default=_NOT_PROVIDED, cast=str, force=False, **kwargs):
""" Retrieve environment variables and specify default and options. :param key: (required) environment variable name to retrieve :param default: value to use if the environment var doesn't exist :param cast: values always come in as strings, cast to this type if needed :param force: force casting of value even when it may not be needed :param boolmap: if True use default map, otherwise you can pass custom map :param sticky: injects default into environment so child processes inherit NOTE: None can be passed as the default to avoid raising a KeyError """ |
boolmap = kwargs.get('boolmap', None)
sticky = kwargs.get('sticky', False)
value = os.environ.get(key, default)
if value is _NOT_PROVIDED:
raise KeyError(_ENV_ERROR_MSG.format(key))
if sticky and value == default:
try:
os.environ[key] = value
except TypeError:
os.environ[key] = str(value)
if force or (value != default and type(value) != cast):
if cast is bool and boolmap is not None:
value = boolean(value, boolmap=boolmap)
elif cast is bool:
value = boolean(value)
else:
value = cast(value)
return value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(arguments):
"""Parse arguments, request the urls, notify if different.""" |
formatter_class = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=formatter_class)
parser.add_argument('infile', help="Input file",
type=argparse.FileType('r'))
parser.add_argument('-o', '--outfile', help="Output file",
default=sys.stdout, type=argparse.FileType('w'))
args = parser.parse_args(arguments)
urls = args.infile.read().splitlines()
api_token = keyring.get_password('pushover', 'api_token')
pushover_user = keyring.get_password('pushover', 'user')
pushover = Pushover(api_token, pushover_user)
for url in urls:
domain = urlparse(url).netloc
urlpath = urlparse(url).path
url_dashes = re.sub(r'/', '-', urlpath)
cache = os.path.expanduser("~/.urlmon-cache")
if not os.path.isdir(cache):
os.mkdir(cache, mode=0o755)
filename = domain + url_dashes + '.html'
filepath = os.path.join(cache, filename)
html = requests.get(url).text
if os.path.isfile(filepath):
with open(filepath) as r:
before = r.read()
if html == before:
logger.info("{} is unchanged".format(url))
else:
msg = "{} changed".format(url)
logger.info(msg)
logger.debug(diff(before, html))
response = pushover.push(msg)
logger.debug("Pushover notification sent: "
"{}".format(response.status_code))
else:
logger.info("New url: {}".format(filename))
with open(filepath, 'w') as w:
w.write(html)
logger.info("Wrote file to cache: {}".format(filename)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate(self):
"""Validate the user and token, returns the Requests response.""" |
validate_url = "https://api.pushover.net/1/users/validate.json"
payload = {
'token': self.api_token,
'user': self.user,
}
return requests.post(validate_url, data=payload) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def push(self, message, device=None, title=None, url=None, url_title=None, priority=None, timestamp=None, sound=None):
"""Pushes the notification, returns the Requests response. Arguments: message -- your message Keyword arguments: device -- your user's device name to send the message directly to that device, rather than all of the user's devices title -- your message's title, otherwise your app's name is used url -- a supplementary URL to show with your message url_title -- a title for your supplementary URL, otherwise just the URL is shown priority -- send as --1 to always send as a quiet notification, 1 to display as high--priority and bypass the user's quiet hours, or 2 to also require confirmation from the user timestamp -- a Unix timestamp of your message's date and time to display to the user, rather than the time your message is received by our API sound -- the name of one of the sounds supported by device clients to override the user's default sound choice. """ |
api_url = 'https://api.pushover.net/1/messages.json'
payload = {
'token': self.api_token,
'user': self.user,
'message': message,
'device': device,
'title': title,
'url': url,
'url_title': url_title,
'priority': priority,
'timestamp': timestamp,
'sound': sound
}
return requests.post(api_url, params=payload) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_obj_attr(cls, obj, path, pos):
"""Resolve one kwargsql expression for a given object and returns its result. :param obj: the object to evaluate :param path: the list of all kwargsql expression, including those previously evaluated. :param int pos: provides index of the expression to evaluate in the `path` parameter. """ |
field = path[pos]
if isinstance(obj, (dict, Mapping)):
return obj[field], pos
elif isinstance(obj, (list, Sequence)):
join_operation = cls.SEQUENCE_OPERATIONS.get(field)
if join_operation is not None:
return (
AnySequenceResult(
cls._sequence_map(obj, path[pos + 1:]),
join_operation
),
len(path) + 1,
)
else:
return obj[int(field)], pos
else:
return getattr(obj, field, None), pos |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get(ns, dburl=None):
'''
Get a default dones object for ns. If no dones object exists for ns yet,
a DbDones object will be created, cached, and returned.
'''
if dburl is None:
dburl = DONES_DB_URL
cache_key = (ns, dburl)
if ns not in DONES_CACHE:
dones_ns = 'dones_{}'.format(ns)
DONES_CACHE[cache_key] = DbDones(ns=dones_ns, dburl=dburl)
return DONES_CACHE[cache_key] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def open_conn(host, db, user, password, retries=0, sleep=0.5):
'''
Return an open mysql db connection using the given credentials. Use
`retries` and `sleep` to be robust to the occassional transient connection
failure.
retries: if an exception when getting the connection, try again at most this many times.
sleep: pause between retries for this many seconds. a float >= 0.
'''
assert retries >= 0
try:
return MySQLdb.connect(host=host, user=user, passwd=password, db=db)
except Exception:
if retries > 0:
time.sleep(sleep)
return open_conn(host, db, user, password, retries - 1, sleep)
else:
raise |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def open_url(url, retries=0, sleep=0.5):
'''
Open a mysql connection to a url. Note that if your password has
punctuation characters, it might break the parsing of url.
url: A string in the form "mysql://username:password@host.domain/database"
'''
return open_conn(retries=retries, sleep=sleep, **parse_url(url)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _get_k(self):
'''
Accessing self.k indirectly allows for creating the kvstore table
if necessary.
'''
if not self.ready:
self.k.create() # create table if it does not exist.
self.ready = True
return self.k |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def clear(self):
'''
Remove all existing done markers and the file used to store the dones.
'''
if os.path.exists(self.path):
os.remove(self.path) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def done(self, key):
'''
return True iff key is marked done.
:param key: a json-serializable object.
'''
# key is not done b/c the file does not even exist yet
if not os.path.exists(self.path):
return False
is_done = False
done_line = self._done_line(key)
undone_line = self._undone_line(key)
with open(self.path) as fh:
for line in fh:
if line == done_line:
is_done = True
elif line == undone_line:
is_done = False
return is_done |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def are_done(self, keys):
'''
Return a list of boolean values corresponding to whether or not each
key in keys is marked done. This method can be faster than
individually checking each key, depending on how many keys you
want to check.
:param keys: a list of json-serializable keys
'''
# No keys are done b/c the file does not even exist yet.
if not os.path.exists(self.path):
return [False] * len(keys)
done_lines = set([self._done_line(key) for key in keys])
undone_lines = set([self._undone_line(key) for key in keys])
status = {}
with open(self.path) as fh:
for line in fh:
if line in done_lines:
# extract serialized key
status[line[5:-1]] = True
elif line in undone_lines:
status[line[5:-1]] = False
serialized_keys = [self._serialize(key) for key in keys]
return [status.get(sk, False) for sk in serialized_keys] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def add(self, key):
'''
add key to the namespace. it is fine to add a key multiple times.
'''
encodedKey = json.dumps(key)
with self.connect() as conn:
with doTransaction(conn):
sql = 'INSERT IGNORE INTO ' + self.table + ' (name) VALUES (%s)'
return insertSQL(conn, sql, args=[encodedKey]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def remove(self, key):
'''
remove key from the namespace. it is fine to remove a key multiple times.
'''
encodedKey = json.dumps(key)
sql = 'DELETE FROM ' + self.table + ' WHERE name = %s'
with self.connect() as conn:
with doTransaction(conn):
return executeSQL(conn, sql, args=[encodedKey]) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_machine(self, descriptor):
""" Load a complete register machine. The descriptor is a map, unspecified values are loaded from the default values. """ |
def get_cfg(name):
if(name in descriptor):
return descriptor[name]
else:
return defaults[name]
self.processor = Processor(width = get_cfg("width"))
self.rom = ROM(get_cfg("rom_size"), get_cfg("rom_width"))
self.processor.register_memory_device(self.rom)
self.registers = []
if(get_cfg("ram_enable")):
self.ram = RAM(get_cfg("ram_size"), get_cfg("ram_width"))
self.processor.register_memory_device(self.ram)
else:
self.ram = None
if(get_cfg("flash_enable")):
self.flash = Flash(get_cfg("flash_size"), get_cfg("flash_width"))
self.processor.register_device(self.flash)
else:
self.flash = None
for register in get_cfg("registers"):
self.processor.add_register(register)
self.registers.append(register)
for command in get_cfg("commands"):
self.processor.register_command(command)
self.processor.setup_done() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def assemble_rom_code(self, asm):
""" assemble the given code and program the ROM """ |
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble()
except BaseException as e:
return e, None
self.rom.program(result)
return None, result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def assemble_flash_code(self, asm):
""" assemble the given code and program the Flash """ |
stream = StringIO(asm)
worker = assembler.Assembler(self.processor, stream)
try:
result = worker.assemble()
except BaseException as e:
return e, None
self.flash.program(result)
return None, result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flush_devices(self):
""" overwrite the complete memory with zeros """ |
self.rom.program([0 for i in range(self.rom.size)])
self.flash.program([0 for i in range(self.flash.size)])
for i in range(self.ram.size):
self.ram.write(i, 0) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_rom(self, format_ = "nl"):
""" return a string representations of the rom """ |
rom = [self.rom.read(i) for i in range(self.rom.size)]
return self._format_mem(rom, format_) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_ram(self, format_ = "nl"):
""" return a string representations of the ram """ |
ram = [self.ram.read(i) for i in range(self.ram.size)]
return self._format_mem(ram, format_) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_flash(self, format_ = "nl"):
""" return a string representations of the flash """ |
flash = [self.flash.read(i) for i in range(self.flash.size)]
return self._format_mem(flash, format_) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def emit(self, action, payload=None, retry=0):
"""Emit action with payload. :param action: an action slug :param payload: data, default {} :param retry: integer, default 0. :return: information in form of dict. """ |
payload = payload or {}
if retry:
_retry = self.transport.retry(retry)
emit = _retry(self.transport.emit)
else:
emit = self.transport.emit
return emit(action, payload) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_upcoming_events(self):
""" Get upcoming PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, ascending. Exceptions * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded """ |
query = urllib.urlencode({'key': self._api_key,
'group_urlname': GROUP_URLNAME})
url = '{0}?{1}'.format(EVENTS_URL, query)
data = self._http_get_json(url)
events = data['results']
return [parse_event(event) for event in events] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_past_events(self):
    """Get past PythonKC meetup events, ordered by event time descending.

    Returns a list of ``pythonkc_meetups.types.MeetupEvent`` with each
    event's attendees and photos attached.

    Raises PythonKCMeetupsBadJson, PythonKCMeetupsBadResponse,
    PythonKCMeetupsMeetupDown, PythonKCMeetupsNotJson or
    PythonKCMeetupsRateLimitExceeded on failure.
    """
    params = {'key': self._api_key,
              'group_urlname': GROUP_URLNAME,
              'status': 'past',
              'desc': 'true'}
    if self._num_past_events:
        params['page'] = str(self._num_past_events)
    url = '{0}?{1}'.format(EVENTS_URL, urllib.urlencode(params))
    events = self._http_get_json(url)['results']
    event_ids = [event['id'] for event in events]
    # Batch-fetch related data once, then distribute it per event.
    events_attendees = self.get_events_attendees(event_ids)
    events_photos = self.get_events_photos(event_ids)

    def belonging_to(pairs, event):
        # Select the items tagged with this event's id.
        return [item for event_id, item in pairs if event_id == event['id']]

    return [parse_event(event,
                        belonging_to(events_attendees, event),
                        belonging_to(events_photos, event))
            for event in events]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_events_attendees(self, event_ids):
    """Get the attendees of the identified events.

    :param event_ids: list of event IDs to fetch attendees for
    :return: list of (event id, ``pythonkc_meetups.types.MeetupMember``)
        tuples; RSVPs answered "no" are excluded.
    """
    query = urllib.urlencode({'key': self._api_key,
                              'event_id': ','.join(event_ids)})
    data = self._http_get_json('{0}?{1}'.format(RSVPS_URL, query))
    return [(rsvp['event']['id'], parse_member_from_rsvp(rsvp))
            for rsvp in data['results']
            if rsvp['response'] != "no"]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_event_attendees(self, event_id):
    """Get the attendees of the identified event.

    :param event_id: ID of the event to get attendees for
    :return: list of ``pythonkc_meetups.types.MeetupMember``; RSVPs
        answered "no" are excluded.
    """
    query = urllib.urlencode({'key': self._api_key,
                              'event_id': event_id})
    data = self._http_get_json('{0}?{1}'.format(RSVPS_URL, query))
    return [parse_member_from_rsvp(rsvp)
            for rsvp in data['results']
            if rsvp['response'] != "no"]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_events_photos(self, event_ids):
    """Get photos for the identified events.

    :param event_ids: list of event IDs to fetch photos for
    :return: list of (event id, ``pythonkc_meetups.types.MeetupPhoto``)
        tuples.
    """
    query = urllib.urlencode({'key': self._api_key,
                              'event_id': ','.join(event_ids)})
    data = self._http_get_json('{0}?{1}'.format(PHOTOS_URL, query))
    return [(photo['photo_album']['event_id'], parse_photo(photo))
            for photo in data['results']]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_event_photos(self, event_id):
    """Get photos for the identified event.

    :param event_id: ID of the event to get photos for
    :return: list of ``pythonkc_meetups.types.MeetupPhoto``.
    """
    query = urllib.urlencode({'key': self._api_key,
                              'event_id': event_id})
    data = self._http_get_json('{0}?{1}'.format(PHOTOS_URL, query))
    return [parse_photo(photo) for photo in data['results']]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _http_get_json(self, url):
    """GET *url*, require a JSON response, and return the parsed data.

    :param url: the URL to GET
    :return: dictionary parsed from the JSON response body
    :raises PythonKCMeetupsNotJson: content type is not JSON/javascript
    :raises PythonKCMeetupsBadJson: body fails to parse as JSON
    """
    response = self._http_get(url)
    content_type = response.headers['content-type']
    # Only the mime subtype matters: 'json' or legacy 'javascript'.
    subtype = mimeparse.parse_mime_type(content_type)[1]
    if subtype not in ('json', 'javascript'):
        raise PythonKCMeetupsNotJson(content_type)
    try:
        return json.loads(response.content)
    except ValueError as e:
        raise PythonKCMeetupsBadJson(e)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _http_get(self, url):
    """GET *url* with retries and return the successful response.

    Retries up to ``self._http_retries`` times when the status is one of
    408, 500, 502, 503, 504.

    :param url: the URL to GET
    :return: the 200 HTTP response
    :raises PythonKCMeetupsMeetupDown: final response was a 5xx
    :raises PythonKCMeetupsRateLimitExceeded: 400 response flagged 'limit'
    :raises PythonKCMeetupsBadResponse: any other non-200 final response
    """
    for try_number in range(self._http_retries + 1):
        response = requests.get(url, timeout=self._http_timeout)
        if response.status_code == 200:
            return response
        if (try_number >= self._http_retries or
                response.status_code not in (408, 500, 502, 503, 504)):
            if response.status_code >= 500:
                raise PythonKCMeetupsMeetupDown(response, response.content)
            if response.status_code == 400:
                # BUGFIX: parse the body first and raise outside the
                # try block. The previous code raised
                # PythonKCMeetupsRateLimitExceeded *inside* a bare
                # try/except:pass, which caught and silently swallowed
                # the very exception it had just raised.
                try:
                    data = json.loads(response.content)
                except ValueError:  # Don't lose original error when JSON is bad
                    data = None
                if isinstance(data, dict) and data.get('code', None) == 'limit':
                    raise PythonKCMeetupsRateLimitExceeded
            raise PythonKCMeetupsBadResponse(response, response.content)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save():
    """Apply configuration changes on all the modules."""
    from .models import ModuleInfo
    logger = logging.getLogger(__name__)
    logger.info("Saving changes")
    # Save + restart each enabled module that actually changed.
    for module in modules():
        if not module.enabled:
            logger.debug('Not saving disabled module: %s' %
                         module.verbose_name)
            continue
        if not module.changed:
            logger.debug('Not saving unchanged module: %s' %
                         module.verbose_name)
            continue
        module.save()
        module.restart()
        module.commit()
    # Commit
    ModuleInfo.commit()
    logger.info("Changes saved")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_default(self):
    """Return the field's default menus as an immutable tuple.

    Uses the user-provided default when one was given; otherwise falls
    back to ``PAGE_MENU_TEMPLATES_DEFAULT`` or, failing that, every
    available choice.
    """
    if self._overridden_default:
        # Respect a user-supplied default without forcing it to text.
        # Compare with Field.get_default().
        default = self.default() if callable(self.default) else self.default
    else:
        # PAGE_MENU_TEMPLATES_DEFAULT semantics:
        #   missing or None -> all choosable menus;
        #   some sequence   -> exactly those menus;
        #   empty sequence  -> no menus.
        default = getattr(settings, "PAGE_MENU_TEMPLATES_DEFAULT", None)
        if default is None:
            default = (choice[0] for choice in
                       self.get_choices(include_blank=False))
    # References to the default are shared among model instances, so it
    # must not be mutable; all sane values cast cleanly to a tuple.
    return tuple(default)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_choices(self):
    """Return custom choices if provided, else menus from settings.

    Falls back to ``PAGE_MENU_TEMPLATES`` when the field definition did
    not override choices.
    """
    if self._overridden_choices:
        # Note: choices is a property on Field bound to _get_choices().
        return self._choices
    menus = getattr(settings, "PAGE_MENU_TEMPLATES", [])
    return (menu[:2] for menu in menus)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_details(job_id, connection=None):
    """Return the job data along with its scheduled timestamp.

    :param job_id: the ID of the job to retrieve.
    :param connection: redis connection; defaults to the module-level one.
    :return: dict of decoded job fields plus 'id' and 'schedule_at'.
    """
    if connection is None:
        connection = r
    details = {'id': job_id,
               'schedule_at': int(connection.zscore(REDIS_KEY, job_id))}
    for key, value in connection.hgetall(job_key(job_id)).items():
        # Redis returns bytes; decode to text where possible and
        # convert digit-only strings to ints.
        try:
            decoded = value.decode('utf-8')
        except UnicodeDecodeError:
            decoded = value
        if decoded.isdigit():
            decoded = int(decoded)
        details[key.decode('utf-8')] = decoded
    return details
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def schedule_job(job_id, schedule_in, connection=None, **kwargs):
    """Schedule a job.

    :param job_id: unique identifier for this job
    :param schedule_in: seconds from now at which to schedule the job,
        or a timedelta object.
    :param kwargs: parameters to attach to the job; a value of None
        deletes that field from the job hash.
    """
    if not isinstance(schedule_in, int):
        # Assumed to be a timedelta; sub-second precision is dropped.
        schedule_in = schedule_in.days * 3600 * 24 + schedule_in.seconds
    schedule_at = int(time.time()) + schedule_in
    if connection is None:
        connection = r
    if 'id' in kwargs:
        raise RuntimeError("'id' is a reserved key for the job ID")
    # Split the attached parameters into fields to set and to remove.
    to_set = {}
    to_delete = []
    for key, value in kwargs.items():
        if value is None:
            to_delete.append(key)
        else:
            to_set[key] = value
    with connection.pipeline() as pipe:
        if schedule_at is not None:
            if isinstance(connection, redis.Redis):
                # StrictRedis and Redis don't have the same argument order
                pipe.zadd(REDIS_KEY, job_id, schedule_at)
            else:
                pipe.zadd(REDIS_KEY, schedule_at, job_id)
        if to_set:
            pipe.hmset(job_key(job_id), to_set)
        if to_delete:
            pipe.hdel(job_key(job_id), *to_delete)
        pipe.execute()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pending_jobs(reschedule_in=None, limit=None, connection=None):
    """Yield the jobs that are due for execution (score <= now).

    :param reschedule_in: number of seconds in which returned jobs should be
        auto-rescheduled. If set to None (default), jobs are removed from
        the schedule instead of being rescheduled.
    :param limit: max number of jobs to retrieve. If set to None (default),
        retrieves all pending jobs with no limit.
    :param connection: redis connection; defaults to the module-level one.
    """
    if connection is None:
        connection = r
    # zrangebyscore requires start to accompany num; only paginate when
    # a limit was requested.
    start = None if limit is None else 0
    job_ids = connection.zrangebyscore(REDIS_KEY, 0, int(time.time()),
                                       start=start, num=limit)
    # First pipeline: remove (or push forward) the fetched jobs so they
    # are not picked up again before being processed.
    with connection.pipeline() as pipe:
        if reschedule_in is None:
            for job_id in job_ids:
                pipe.zrem(REDIS_KEY, job_id)
        else:
            schedule_at = int(time.time()) + reschedule_in
            for job_id in job_ids:
                args = (schedule_at, job_id)
                if isinstance(connection, redis.Redis):
                    # StrictRedis or Redis don't have the same argument order
                    args = (job_id, schedule_at)
                pipe.zadd(REDIS_KEY, *args)
        pipe.execute()
    # Second pipeline: fetch every job's hash in a single round trip.
    with connection.pipeline() as pipe:
        for job_id in job_ids:
            pipe.hgetall(job_key(job_id.decode('utf-8')))
        jobs = pipe.execute()
    for job_id, data in izip(job_ids, jobs):
        # Decode raw redis bytes; digit-only values become ints.
        job_data = {'id': job_id.decode('utf-8')}
        for key, value in data.items():
            try:
                decoded = value.decode('utf-8')
            except UnicodeDecodeError:
                decoded = value
            if decoded.isdigit():
                decoded = int(decoded)
            job_data[key.decode('utf-8')] = decoded
        yield job_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def scheduled_jobs(with_times=False, connection=None):
    """Yield every job currently in the scheduler.

    :param with_times: when True yield (job_id, timestamp) tuples,
        otherwise yield plain job_id strings.
    :param connection: redis connection; defaults to the module-level one.
    """
    if connection is None:
        connection = r
    entries = connection.zrangebyscore(REDIS_KEY, 0, sys.maxsize,
                                       withscores=with_times)
    if with_times:
        for job_id, timestamp in entries:
            yield job_id.decode('utf-8'), timestamp
    else:
        for job_id in entries:
            yield job_id.decode('utf-8')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self, host=None, port=None, debug=None, **options):
    """Start the AgoraApp; config must provide at least REDIS and PORT.

    Any callables passed in ``options['tasks']`` are registered as batch
    tasks, driven by a background thread until the app stops.

    NOTE(review): the host/port/debug parameters are accepted but
    ignored; the app always binds 0.0.0.0 on config['PORT'] with
    debug=True — confirm that is intentional.
    """
    tasks = options.get('tasks', [])
    # Register only real callables; silently skip anything else.
    for task in tasks:
        if task is not None and hasattr(task, '__call__'):
            _batch_tasks.append(task)
    thread = Thread(target=self.batch_work)
    thread.start()
    try:
        super(AgoraApp, self).run(host='0.0.0.0', port=self.config['PORT'], debug=True, use_reloader=False)
    except Exception, e:
        print e.message
        self._stop_event.set()
    # Wait for the batch thread to wind down before returning.
    if thread.isAlive():
        thread.join()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _handle_exc(exception):
    """Record *exception* with its stack trace to FogBugz via BugzScout.

    The report is submitted asynchronously through celery, so it will
    not reach FogBugz until a worker processes the task.

    :param exception: uncaught exception thrown in app
    :return: list containing an empty response body
    """
    # Set the description to a familiar string with the exception
    # message. Add the stack trace to extra.
    # BUGFIX: traceback.extract_tb() takes only the traceback object
    # (plus an optional limit); unpacking the whole (type, value, tb)
    # tuple into it raised a TypeError. Pass sys.exc_info()[2] instead.
    bugzscout.ext.celery_app.submit_error.delay(
        'http://fogbugz/scoutSubmit.asp',
        'error-user',
        'MyAppProject',
        'Errors',
        'An error occurred in MyApp: {0}'.format(exception.message),
        extra=traceback.extract_tb(sys.exc_info()[2]))
    # Return an empty body.
    return ['']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def app(environ, start_response):
    """Simple WSGI application.

    GET requests receive 200 OK with 'Hellow world!' as the body; any
    other method receives 405 Method Not Allowed with an empty body. If
    an exception is thrown, a 500 Internal Server Error is returned
    (body contains no error details) and the error plus stack trace is
    reported to FogBugz via BugzScout.

    :param environ: WSGI environ
    :param start_response: function that accepts status string and headers
    """
    try:
        if environ['REQUEST_METHOD'] == 'GET':
            start_response('200 OK', [('content-type', 'text/html')])
            return ['Hellow world!']
        start_response(
            '405 Method Not Allowed', [('content-type', 'text/html')])
        return ['']
    except Exception as ex:
        # Tell the client the request failed, passing exception info
        # through to the WSGI layer...
        start_response(
            '500 Internal Server Error',
            [('content-type', 'text/html')],
            sys.exc_info())
        # ...then record it to FogBugz; this yields the error body.
        return _handle_exc(ex)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_cookbook_mgmt_options(parser):
    """Register the ``cookbook`` sub-command and its arguments.

    :param parser: subparsers object to register the command on
    :rtype: argparse.ArgumentParser
    """
    cookbook = parser.add_parser(
        'cookbook',
        help='Invoke in a Jenkins job to update a cookbook in chef-repo')
    cookbook.add_argument('repo', action='store',
                          help='Git URL for chef-repo')
    cookbook.set_defaults(func='process_cookbook')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_github_hook_options(parser):
    """Register the ``github`` sub-command for installing Jenkins hooks.

    :param parser: subparsers object to register the command on
    :rtype: argparse.ArgumentParser
    """
    cookbook = parser.add_parser(
        'github',
        help='Install the Jenkins callback hook in a GitHub repository')
    cookbook.add_argument('owner', action='store',
                          help='The owner of the GitHub repo')
    cookbook.add_argument('repo', action='store',
                          help='The GitHub repository name')
    # Build a hostname-based example for the help text.
    example = 'jenkins.%s' % socket.gethostname()
    cookbook.add_argument('jenkins_hook_url', action='store',
                          help='The jenkins hook URL. For example %s' % example)
    cookbook.add_argument('-g', '--github-host', action='store',
                          dest='github', default=github.GITHUB_HOST,
                          help='Override github.com for a '
                               'GitHub::Enterprise host')
    cookbook.add_argument('-u', '--username', action='store',
                          dest='username',
                          help='Specify a different username than the repo '
                               'owner')
    cookbook.set_defaults(func='github_hooks')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_jenkins_job_options(parser):
    """Register the ``jenkins`` sub-command for adding a cookbook job.

    :param parser: subparsers object to register the command on
    :rtype: argparse.ArgumentParser
    """
    cookbook = parser.add_parser('jenkins',
                                 help='Add a new cookbook job to Jenkins')
    # The four required positional arguments, in order.
    positionals = (('jenkins', 'The jenkins server hostname'),
                   ('name', 'The cookbook name'),
                   ('cookbook', 'The cookbook git repository URL'),
                   ('chef_repo', 'The chef-repo git repository URL'))
    for arg_name, help_text in positionals:
        cookbook.add_argument(arg_name, action='store', help=help_text)
    cookbook.add_argument('-u', '--username', action='store',
                          dest='username',
                          default=pwd.getpwuid(os.getuid())[0],
                          help='Specify a different username than the repo '
                               'owner')
    cookbook.add_argument('-n', '--hipchat-notification', action='store',
                          dest='hipchat',
                          help='Hipchat room for notifications')
    cookbook.set_defaults(func='new_job')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def argparser():
    """Build and return the command-line argument parser.

    :rtype: argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(description=__description__)
    subparsers = parser.add_subparsers()
    # Each registrar adds one sub-command to the parser.
    for register in (add_cookbook_mgmt_options,
                     add_role_options,
                     add_github_hook_options,
                     add_jenkins_job_options):
        register(subparsers)
    return parser
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_field_description(field):
    """Render a field's description as an HTML help block.

    Returns an empty string when the field has no description attribute
    or its description is empty.
    """
    if not hasattr(field, 'description') or field.description == '':
        return ''
    markup = """<p class="help-block">{field.description}</p>""".format(
        field=field
    )
    return HTMLString(markup)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_post_relations(user, topic, deleting=False):
    """Keep the author's post count and the topic timestamps in sync.

    Decrements the user's post_count when *deleting*, increments it
    otherwise, then stamps the parent topic as modified now.
    """
    user.post_count += -1 if deleting else 1
    user.save(update_fields=['post_count'])
    topic.modified = datetime.now()
    topic.modified_int = time.time()
    topic.save(update_fields=['modified', 'modified_int'])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def addClassKey(self, klass, key, obj):
    """Add *obj* to the collection, indexed by *klass* and *key*.

    @param klass: The class of the object.
    @param key: The datastore key of the object.
    @param obj: The loaded instance from the datastore.
    """
    self._getClass(klass)[key] = obj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_duration(duration, start=None, end=None):
    """Attempt to parse an ISO8601 formatted duration.

    Accepts a ``duration`` (ISO8601 string) and optionally a start or
    end datetime. Without an anchor a naive calculation is used;
    otherwise the duration is computed relative to the anchor.
    Returns a ``datetime.timedelta`` object.
    """
    if start:
        return parse_duration_with_start(start, duration)
    if end:
        return parse_duration_with_end(duration, end)
    return parse_simple_duration(duration)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_simple_duration(duration):
    """Attempt to parse an ISO8601 duration with a naive calendar.

    Assumes 365 days per year and 30 days per month. Returns a
    ``datetime.timedelta`` object; raises ParseError when the string
    cannot be parsed.
    """
    elements = _parse_duration_string(_clean(duration))
    if elements:
        return _timedelta_from_elements(elements)
    raise ParseError()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_duration_with_start(start, duration):
    """Attempt to parse an ISO8601 duration anchored at a start datetime.

    ``duration`` must be an ISO8601 formatted string.

    NOTE(review): despite the original docstring, this returns a
    ``(start, timedelta)`` tuple rather than a bare timedelta — confirm
    which form callers expect.
    """
    elements = _parse_duration_string(_clean(duration))
    year, month = _year_month_delta_from_elements(elements)
    # NOTE(review): start.month + month can exceed 12 (and a year shift
    # can land on Feb 29), which would make replace() raise ValueError —
    # presumably _year_month_delta_from_elements keeps the month delta
    # in range; verify.
    end = start.replace(
        year=start.year + year,
        month=start.month + month
    )
    # Years/months were consumed above; the remaining elements are pure
    # time quantities handled via timedelta arithmetic.
    del elements['years']
    del elements['months']
    end += _timedelta_from_elements(elements)
    return start, end - start
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def int_to_var_bytes(x):
    """Encode an integer as a bitcoin variable-length integer.

    :param x: the integer to convert
    :return: the encoding as a bytearray
    """
    if x < 253:
        # Small values are a single raw byte, no prefix.
        return intbytes.to_bytes(x, 1)
    if x < 65536:
        prefix, width = 253, 2
    elif x < 4294967296:
        prefix, width = 254, 4
    else:
        prefix, width = 255, 8
    # Prefix byte followed by the little-endian value.
    return bytearray([prefix]) + intbytes.to_bytes(x, width)[::-1]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bitcoin_sig_hash(message):
    """Hash *message* using bitcoin's signed-message envelope.

    Bitcoin prefixes messages with a magic header and the message
    length before double-SHA256 hashing them for signing.

    :param message: the encoded message to hash in preparation for verifying
    """
    envelope = (b'\x18Bitcoin Signed Message:\n' +
                int_to_var_bytes(len(message)) +
                message)
    return double_sha256(envelope)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_signature(message, signature, address):
    """Verify a bitcoin signed message.

    :param message: the plain text of the message to verify
    :param signature: the signature in base64 format
    :param address: the signing address
    :return: True when the signature matches the address, else False
    """
    if (len(signature) != SIGNATURE_LENGTH):
        return False
    try:
        binsig = base64.b64decode(signature)
    except (TypeError, ValueError):
        # Narrowed from a bare except: b64decode only raises
        # binascii.Error (a ValueError subclass) for bad padding/data,
        # or TypeError for a wrong argument type. A bare except also
        # hid unrelated errors such as KeyboardInterrupt.
        return False
    r = intbytes.from_bytes(binsig[1:33])
    s = intbytes.from_bytes(binsig[33:65])
    val = intbytes.from_bytes(bitcoin_sig_hash(message.encode()))
    pubpairs = possible_public_pairs_for_signature(
        generator_secp256k1,
        val,
        (r, s))
    addr_hash160 = bitcoin_address_to_hash160_sec(address)
    # The recovered public key may correspond to either a compressed or
    # an uncompressed address encoding; accept both.
    for pair in pubpairs:
        if (public_pair_to_hash160_sec(pair, True) == addr_hash160):
            return True
        if (public_pair_to_hash160_sec(pair, False) == addr_hash160):
            return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def query(self, query):
    """Return an iterable of objects matching criteria expressed in *query*.

    Every stored object whose key starts with ``query.key`` is
    deserialised and handed to the query object for filtering.

    :param query: Query object describing the objects to return.
    :return: iterable cursor with all objects matching criteria
    """
    candidates = (self._deserialised_value(raw)
                  for raw in self.container.get_objects(prefix=query.key))
    return query(candidates)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self, force_insert=False, force_update=False):
    """Save the current instance through the remote API.

    Serializes every field flagged ``serialize``, issues a POST for an
    insert (no resource_uri yet, or force_insert) or a PUT for an
    update, then refreshes local state from the API response.

    The 'force_insert' and 'force_update' parameters can be used to
    insist that the "save" must be a POST or PUT respectively.
    Normally, they should not be set.
    """
    if force_insert and force_update:
        raise ValueError("Cannot force both insert and updating in resource saving.")
    # Dehydrate every serializable field into a plain dict payload.
    data = {}
    for name, field in self._meta.fields.items():
        if field.serialize:
            data[name] = field.dehydrate(getattr(self, name, None))
    insert = True if force_insert or self.resource_uri is None else False
    if insert:
        resp = self._meta.api.http_resource("POST", self._meta.resource_name, data=self._meta.api.resource_serialize(data))
    else:
        resp = self._meta.api.http_resource("PUT", self.resource_uri, data=self._meta.api.resource_serialize(data))
    # Re-fetch the canonical representation: from the Location header
    # (typical for a POST), or from our own URI after a bodyless 204.
    if "Location" in resp.headers:
        resp = self._meta.api.http_resource("GET", resp.headers["Location"])
    elif resp.status_code == 204:
        resp = self._meta.api.http_resource("GET", self.resource_uri)
    else:
        # No way to locate fresh data; leave local state untouched.
        return
    data = self._meta.api.resource_deserialize(resp.text)
    # Update local values from the API Response
    self.__init__(**data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self):
    """Delete the current instance through the remote API.

    Raises ValueError when the instance has never been saved (its
    resource_uri is None). Override in a subclass to customize the
    deleting process.
    """
    uri = self.resource_uri
    if uri is None:
        message = ("{0} object cannot be deleted because resource_uri "
                   "attribute cannot be None").format(self._meta.resource_name)
        raise ValueError(message)
    self._meta.api.http_resource("DELETE", uri)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def spawn_actor(self, factory, name=None):
    """Spawn a child actor from *factory*, optionally at *name*.

    Returns an immediately usable `Ref` to the newly created actor,
    regardless of the location of the new actor, or when the actual
    spawning will take place.

    Raises TypeError for names with slashes, ValueError for names
    starting with '$' (reserved for auto-generated names) and
    NameConflict when the name is already taken.
    """
    if name and '/' in name:  # pragma: no cover
        raise TypeError("Actor names cannot contain slashes")
    # Lazily create the child registry on first spawn.
    if not self._children:
        self._children = {}
    uri = self.uri / name if name else None
    if name:
        if name.startswith('$'):
            raise ValueError("Unable to spawn actor at path %s; name cannot start with '$', it is reserved for auto-generated names" % (uri.path,))
        if name in self._children:
            raise NameConflict("Unable to spawn actor at path %s; actor %r already sits there" % (uri.path, self._children[name]))
    if not uri:
        # Anonymous spawn: derive an auto-generated name for the factory.
        name = self._generate_name(factory)
        uri = self.uri / name
    assert name not in self._children  # XXX: ordering??
    # Register the child ref before returning it; Cell.spawn handles the
    # actual (possibly deferred) construction.
    child = self._children[name] = Cell.spawn(parent_actor=self.ref, factory=factory, uri=uri, node=self.node).ref
    return child
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lookup_cell(self, uri):
    """Look up a local actor cell by its location relative to this actor.

    An absolute path (leading empty step from '/') starts the walk at
    the root actor, otherwise it starts here. Returns None-ish (the
    falsy get_child result) when any step of the path is missing.
    """
    steps = uri.steps
    if steps[0] == '':
        # Absolute path: consume the leading empty step, start at root.
        found = self.root
        steps.popleft()
    else:
        found = self
    for step in steps:
        assert step != ''
        found = found.get_child(step)
        if not found:
            break
        # presumably get_child returns a Ref whose ._cell we descend
        # into for the next step — verify against Ref's definition.
        found = found._cell
    return found
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def r_num(obj):
    """Read a list of numbers.

    Sequences are iterated directly; anything else is wrapped in a
    LinesIterator (line-oriented input).
    """
    iterate = iter if isinstance(obj, (list, tuple)) else LinesIterator
    return Dataset([Dataset.FLOAT]).load(iterate(obj))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def r_date_num(obj, multiple=False):
    """Read a date-value table.

    With ``multiple`` set, the third column of each line is treated as a
    series label and one dataset per label is returned; otherwise a
    single two-column (date, value) dataset is loaded.
    """
    iterate = iter if isinstance(obj, (list, tuple)) else LinesIterator
    if not multiple:
        return Dataset([Dataset.DATE, Dataset.FLOAT]).load(iterate(obj))
    datasets = {}
    for line in iterate(obj):
        label = line[2]
        if label not in datasets:
            series = Dataset([Dataset.DATE, Dataset.FLOAT])
            series.name = label
            datasets[label] = series
        datasets[label].parse_elements(line[0:2])
    return datasets.values()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def command(self, rule, **options):
    """Register a command handler for *rule* via a decorator.

    Recognized options with their defaults:
    direct=False, override=True, inject=False, flags=0.
    """
    defaults = {"direct": False, "override": True,
                "inject": False, "flags": 0}
    for key, value in defaults.items():
        options.setdefault(key, value)
    if not options["direct"]:
        # Non-direct rules are preprocessed into a regex pattern first.
        rule = self.regexy(rule)
    regex = re.compile(rule, flags=options["flags"])
    self.handlers.setdefault(regex, [])

    def handler(f):
        # The noop sentinel carries no options.
        f.options = {} if f == noop else options
        if options["override"]:
            self.handlers[regex] = [f]
        else:
            self.handlers[regex].append(f)
        f.no_args = self.no_args(f)
        return f
    return handler
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorize_client_credentials(self, client_id, client_secret=None,
                                 scope="private_agent"):
    """Authorize to the platform with client credentials.

    Use this when you possess a client_id/client_secret pair generated
    by the platform.
    """
    self.auth_data = {
        "grant_type": "client_credentials",
        "scope": [scope],
        "client_id": client_id,
        "client_secret": client_secret,
    }
    self._do_authorize()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authorize_password(self, client_id, username, password):
    """Authorize to the platform as a regular user.

    Requires a valid client_id (same as the web application) plus the
    user's email and password. The username and password are not stored
    on the client, only the refresh token is. The only valid scope for
    this authorization is "regular_user".

    :param client_id: Valid client_id
    :type client_id: String
    :param username: User email
    :type username: String
    :param password: User password
    :type password: String
    """
    self.auth_data = {
        "grant_type": "password",
        "username": username,
        "password": password,
        "client_id": client_id,
        "scope": ["regular_user"],
    }
    self._do_authorize()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _do_authorize(self):
    """Perform the authorization request against /api/token.

    On success stores the access token and, when a refresh token is
    issued, swaps auth_data over to the refresh_token grant for later
    re-authorization.

    :raises ApiwatcherClientException: when auth data is missing, the
        credentials are wrong (401), or any other non-201 response.
    """
    if self.auth_data is None:
        raise ApiwatcherClientException("You must provide authorization data.")
    r = requests.post(
        "{0}/api/token".format(self.base_url), json=self.auth_data,
        verify=self.verify_certificate, timeout=self.timeout
    )
    if r.status_code == 401:
        raise ApiwatcherClientException("Wrong credentials supplied: {0}".format(
            r.json()["message"]
        ))
    elif r.status_code != 201:
        try:
            reason = r.json()["message"]
        except (ValueError, KeyError):
            # Narrowed from a bare except: fall back to the raw body
            # when the response is not JSON or lacks a "message" field.
            reason = r.text
        raise ApiwatcherClientException(
            "Authorization failed. Reason {0} {1}".format(
                r.status_code, reason)
        )
    else:
        data = r.json()["data"]
        self.token = data["access_token"]
        if "refresh_token" in data:
            # From now on, re-authorize via the refresh token grant.
            self.auth_data = {
                "grant_type": "refresh_token",
                "refresh_token": data["refresh_token"],
                "client_id": self.auth_data["client_id"]
            }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _do_request(self, method, endpoint, data=None):
    """Perform one API request, transparently (re-)authorizing.

    A missing token triggers authorization before the first attempt; a
    401 response triggers one re-authorization followed by a single
    retry.

    :param method: HTTP method name (e.g. ``"get"``, ``"post"``).
    :param endpoint: path appended to ``self.base_url``.
    :param data: optional JSON-serializable request body.
    :return: the ``requests`` response of the (possibly retried) request.
    """
    def send():
        # One attempt with the current bearer token.  Extracted so the
        # request construction is not duplicated for the retry path.
        return requests.request(
            method,
            "{0}{1}".format(self.base_url, endpoint),
            headers={
                "Authorization": "Bearer {0}".format(self.token),
                "Content-Type": "application/json"
            },
            json=data,
            verify=self.verify_certificate,
            timeout=self.timeout
        )

    # No token - authorize
    if self.token is None:
        self._do_authorize()
    r = send()
    if r.status_code == 401:
        # Token expired or was revoked: re-authorize once and retry.
        self._do_authorize()
        r = send()
    return r
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def DocFileSuite(*paths, **kwargs):
    """Extension of the standard ``doctest.DocFileSuite`` that wires the
    test browser into doctests.

    Defaults (only applied when the caller did not supply them): browser
    setUp/tearDown hooks, the ``Browser`` global, and lenient doctest
    option flags.  Relative paths are resolved against the caller's
    module when no ``package`` is given.
    """
    defaults = {
        'setUp': setUpBrowser,
        'tearDown': tearDownBrowser,
        'optionflags': (doctest.NORMALIZE_WHITESPACE
                        | doctest.REPORT_ONLY_FIRST_FAILURE
                        | doctest.ELLIPSIS),
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    kwargs.setdefault('globs', {})['Browser'] = Browser
    if 'package' not in kwargs:
        # Resolve relative names based on the caller's module
        kwargs['package'] = doctest._normalize_module(None)
        kwargs['module_relative'] = True
    return doctest.DocFileSuite(*paths, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def queryHTML(self, path):
    """Run an XPath query on the current HTML document and print matches.

    The document is parsed from ``self.contents`` with lxml.  String
    results (attribute values, ``text()`` nodes) are printed as-is;
    element results are serialized as pretty-printed markup.

    NOTE(review): Python 2 code (``print`` statement, ``basestring``).
    """
    # lxml is an optional dependency; ``etree`` is None when its import failed.
    if etree is None:
        raise Exception("lxml not available")
    document = etree.HTML(self.contents)
    for node in document.xpath(path):
        # XPath may yield plain strings or element nodes, depending on path.
        if isinstance(node, basestring):
            print node
        else:
            # Serialize elements; strip the trailing newline added by lxml.
            print etree.tostring(node, pretty_print=True).strip()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pipeline_repr(obj):
    """Return ``standard_repr(obj)`` while tracking repr nesting depth.

    The counter ``repr_state.in_repr`` is incremented for the duration of
    the call (and lazily initialized to 0 on first use), so other code can
    tell whether it is being repr'd — used for pieshell pipelines.
    """
    depth = getattr(repr_state, 'in_repr', 0)
    repr_state.in_repr = depth + 1
    try:
        return standard_repr(obj)
    finally:
        # Always restore the counter, even if standard_repr raises.
        repr_state.in_repr -= 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self, redirects = []):
    """Run the pipeline with the given redirects.

    A deep copy of the pipeline is executed inside a copy session, so the
    original object stays reusable.  Returns a ``RunningPipeline`` wrapping
    the spawned processes; it is also recorded as
    ``last_pipeline`` on the copy's environment.
    """
    if not isinstance(redirects, redir.Redirects):
        # Wrap raw redirect specs, layered on the environment's defaults.
        redirects = redir.Redirects(self._env._redirects, *redirects)
    with copy.copy_session() as sess:
        clone = copy.deepcopy(self)
        procs = clone._run(redirects, sess)
    running = RunningPipeline(procs, clone)
    clone._env.last_pipeline = running
    return running
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lock_file(path, maxdelay=.1, lock_cls=LockFile, timeout=10.0):
    """Cooperative file lock.

    Polls ``lock_cls(path)`` (``lockfile.LockFile`` by default) until it is
    acquired, sleeping ``maxdelay`` seconds between attempts, then yields
    the held lock and releases it afterwards.

    :raises LockTimeout: if the lock is not acquired within ``timeout``
        seconds (same exception message style as lockfile itself).
    """
    lock = lock_cls(path)
    deadline = time.time() + timeout
    acquired = False
    while not acquired:
        # Deadline is checked before every attempt, including the first.
        if time.time() >= deadline:
            raise LockTimeout("Timeout waiting to acquire lock for %s" % (path,))  # same exception messages as in lockfile
        try:
            lock.acquire(timeout=0)
        except AlreadyLocked:
            sleep(maxdelay)
        else:
            acquired = True
            try:
                yield lock
            finally:
                # Release even if the consumer raised while holding the lock.
                lock.release()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def digest_content(self, rule):
    """Walk a qualified rule's content tokens into a property dict.

    This is deliberately naive and will choke/fail on anything more
    evolved than flat ``ident(string): value(string)`` pairs.

    Arguments:
        rule (tinycss2.ast.QualifiedRule): Qualified rule object as
            returned by tinycss2.

    Returns:
        dict: Ordered mapping of property names to their string values
        (``None`` when an ident has no following string token).
    """
    data = OrderedDict()
    current_key = None
    for token in rule.content:
        # Each identity token starts a new property name.
        if token.type == 'ident':
            name = token.value
            # Ignore starting '-' from css variables
            if name.startswith('-'):
                name = name[1:]
            current_key = name
            data[current_key] = None
        # A string token is the value of the most recent property.
        # Guard against a stray string before any ident, which previously
        # created a bogus ``None`` key in the result.
        elif token.type == 'string' and current_key is not None:
            data[current_key] = token.value
    return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def consume(self, source):
    """Parse *source* with tinycss2 and collect styleguide rules.

    Arguments:
        source (string): Stylesheet content to parse.

    Returns:
        dict: Ordered mapping of rule names to their property dicts.
    """
    manifest = OrderedDict()
    parsed = parse_stylesheet(
        source,
        skip_comments=True,
        skip_whitespace=True,
    )
    for rule in parsed:
        # Gather rule selector+properties
        name = self.digest_prelude(rule)
        # Only rules inside the styleguide namespace are retained.
        if name.startswith(RULE_BASE_PREFIX):
            manifest[name] = self.digest_content(rule)
    return manifest
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post_comment(request, next=None, using=None):
    """
    Post a comment.

    HTTP POST is required and the user must be authenticated; anonymous
    requests receive a ``CommentPostBadRequest``.  On success the comment
    is saved, ``comment_was_posted`` is sent, a success message is queued,
    and the user is redirected to ``next`` (argument, or the ``next``
    POST field).

    :param request: incoming ``HttpRequest``; comment data is read from POST.
    :param next: optional redirect target after a successful post.
    :param using: optional database alias for the target-object lookup.
    """
    # Fill out some initial data fields from an authenticated user, if present
    data = request.POST.copy()
    if request.user.is_authenticated:
        data["user"] = request.user
    else:
        # Anonymous commenting is rejected outright.
        return CommentPostBadRequest("You must be logged in to comment")
    # Look up the object we're trying to comment about
    ctype = data.get("content_type")
    object_pk = data.get("object_pk")
    if ctype is None or object_pk is None:
        return CommentPostBadRequest("Missing content_type or object_pk field.")
    try:
        # ctype is "app_label.model_name"; split only on the first dot.
        model = models.get_model(*ctype.split(".", 1))
        target = model._default_manager.using(using).get(pk=object_pk)
    except TypeError:
        return CommentPostBadRequest(
            "Invalid content_type value: %r" % escape(ctype))
    except AttributeError:
        return CommentPostBadRequest(
            "The given content-type %r does not resolve to a valid model." % \
            escape(ctype))
    except ObjectDoesNotExist:
        return CommentPostBadRequest(
            "No object matching content-type %r and object PK %r exists." % \
            (escape(ctype), escape(object_pk)))
    except (ValueError, ValidationError) as e:
        # NOTE(review): message text has a typo ("go get"); left unchanged
        # here since it is runtime output, not a comment.
        return CommentPostBadRequest(
            "Attempting go get content-type %r and object PK %r exists raised %s" % \
            (escape(ctype), escape(object_pk), e.__class__.__name__))
    # Construct the comment form
    form = comments.get_form()(target, data=data)
    # Check security information
    if form.security_errors():
        return CommentPostBadRequest(
            "The comment form failed security verification: %s" % \
            escape(str(form.security_errors())))
    # Check for next
    if not next:
        next = data.get("next")
    # If there are errors show the comment
    if form.errors:
        # Most specific template first: per-model, per-app, then global.
        template_list = [
            "comments/%s/%s/form.html" % (model._meta.app_label, model._meta.module_name),
            "comments/%s/form.html" % model._meta.app_label,
            "comments/form.html",
        ]
        return render_to_response(
            template_list, {
                "comment": form.data.get("comment", ""),
                "form": form,
                "next": data.get("next", next),
            },
            RequestContext(request, {})
        )
    # Otherwise create the comment
    comment = form.get_comment_object()
    comment.ip_address = request.META.get("REMOTE_ADDR", None)
    if request.user.is_authenticated:
        comment.user = request.user
    # Signal that the comment is about to be saved
    responses = signals.comment_will_be_posted.send(
        sender=comment.__class__,
        comment=comment,
        request=request
    )
    # Any receiver returning False vetoes the comment.
    for (receiver, response) in responses:
        if response == False:
            return CommentPostBadRequest(
                "comment_will_be_posted receiver %r killed the comment" % receiver.__name__)
    # Save the comment and signal that it was saved
    comment.save()
    signals.comment_was_posted.send(
        sender=comment.__class__,
        comment=comment,
        request=request
    )
    messages.success(request, 'Your comment was saved.')
    return redirect(next)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def streaming_sample(seq, k, limit=None):
    '''Streaming sample.

    Iterate over seq (once!) keeping k random elements with uniform
    distribution (reservoir sampling, Algorithm R).

    As a special case, if ``k`` is ``None``, then ``list(seq)`` is
    returned.

    :param seq: iterable of things to sample from
    :param k: size of desired sample
    :param limit: stop reading ``seq`` after considering this many
    :return: list of elements from seq, length k (or less if seq is
        short)
    '''
    if k is None:
        return list(seq)
    seq = iter(seq)
    if limit is not None:
        k = min(limit, k)
        limit -= k
    # Fill the reservoir with the first k elements.
    result = list(islice(seq, k))
    # Algorithm R: the i-th element seen (1-based) replaces a uniformly
    # chosen reservoir slot with probability k/i.  The previous code used
    # probability 1/i, started the count one element early, and raised
    # ZeroDivisionError for k == 0 — all fixed below.
    for count, x in enumerate(islice(seq, limit), len(result) + 1):
        if rand.random() * count < k:
            result[rand.randint(0, k - 1)] = x
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_profile_model():
    """Return the yacms profile model named by
    ``settings.ACCOUNTS_PROFILE_MODEL``.

    Raises ``ProfileNotConfigured`` when the setting is unset/empty, and
    ``ImproperlyConfigured`` when it is malformed or points at a model
    that is not installed.
    """
    path = getattr(settings, "ACCOUNTS_PROFILE_MODEL", None)
    if not path:
        raise ProfileNotConfigured
    try:
        return apps.get_model(path)
    except ValueError:
        # Setting was not "app_label.model_name".
        raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL must be of "
                                   "the form 'app_label.model_name'")
    except LookupError:
        # Well-formed, but no such model is registered.
        raise ImproperlyConfigured("ACCOUNTS_PROFILE_MODEL refers to "
                                   "model '%s' that has not been installed"
                                   % settings.ACCOUNTS_PROFILE_MODEL)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_profile_for_user(user):
    """Return the site-specific profile for *user*, creating it if needed.

    The profile is cached on the user instance as ``_yacms_profile`` so
    repeated calls hit the database only once.  Raises
    ``ProfileNotConfigured`` if ``settings.ACCOUNTS_PROFILE_MODEL`` is not
    set, and ``ImproperlyConfigured`` if the corresponding model can't be
    found.
    """
    if not hasattr(user, '_yacms_profile'):
        # Raises ProfileNotConfigured if not bool(ACCOUNTS_PROFILE_MODEL)
        model = get_profile_model()
        manager = model._default_manager.using(user._state.db)
        field_name = get_profile_user_fieldname(model, user.__class__)
        profile, _created = manager.get_or_create(**{field_name: user})
        # Attach the (possibly freshly created) profile back to the user.
        profile.user = user
        user._yacms_profile = profile
    return user._yacms_profile
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.