text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cancel( self, identifier: typing.Any, exc_type: typing.Optional[type]=None, ) -> bool: """Cancel an active coroutine and remove it from the schedule. Args: identifier (typing.Any):
The identifier returned from add. exc_type (typing.Optional[type]):
The exception type to throw into the coroutine on cancel. No exception is thrown if nothing is given. Instead the coroutine is no longer processed. Returns: bool: True if the coroutine is cancelled. False if the identifier is invalid or if the coroutine is complete. """ |
# Abstract hook: concrete scheduler implementations must override cancel().
raise NotImplementedError() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _fill_text(self, text, width=None, indent=None):
""" Reflow text width while maintaining certain formatting characteristics like double newlines and indented statements. """ |
# Two passes: (1) split raw text into (indent-prefix, paragraph) pairs,
# (2) wrap each paragraph to the available width with the shared indent.
assert isinstance(text, str)
if indent is None:
# NOTE(review): NBSP appears to be a non-breaking-space constant defined
# elsewhere in this module -- confirm.
indent = NBSP * self._current_indent
assert isinstance(indent, str)
paragraphs = []
line_buf = []
pre = ''
for fragment in text.splitlines():
pre_indent = self.leadingws.match(fragment)
# A blank line or an explicitly indented line terminates the paragraph
# currently being buffered.
if not fragment or pre_indent:
if line_buf:
line = ' '.join(line_buf)
paragraphs.append((pre, self.whitespace.sub(' ', line)))
if not fragment:
# Preserve the blank line as an empty paragraph.
paragraphs.append(('', ''))
else:
# Indented statements are emitted verbatim with their own prefix.
pre = pre_indent.group()
fragment = self.leadingws.sub('', fragment)
paragraphs.append((pre, fragment))
line_buf = []
pre = ''
else:
line_buf.append(fragment)
# Flush any trailing buffered paragraph.
if line_buf:
line = ' '.join(line_buf)
paragraphs.append((pre, self.whitespace.sub(' ', line)))
indent = VTMLBuffer(indent)
nl = VTMLBuffer('\n')
if width is None:
width = self._width - len(indent)
lines = []
for pre, paragraph in paragraphs:
# Paragraphs carrying their own prefix get less horizontal room.
pwidth = width - len(pre)
lines.append(nl.join((indent + pre + x)
                     for x in vtmlrender(paragraph).wrap(pwidth)))
return nl.join(lines) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bind_env(self, action, env):
""" Bind an environment variable to an argument action. The env value will traditionally be something uppercase like `MYAPP_FOO_ARG`. Note that the ENV value is assigned using `set_defaults()` and as such it will be overridden if the argument is set via `parse_args()` """ |
# Register the ENV variable for this action, refusing duplicates so two
# arguments can never claim the same variable name.
if env in self._env_actions:
    raise ValueError('Duplicate ENV variable: %s' % env)
# Record the mapping in both directions: registry and action attribute.
action.env = env
self._env_actions[env] = action
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_help(self, *args, **kwargs):
""" Add pager support to help output. """ |
# Route help output through the session's pager when the command allows it;
# otherwise fall back to plain argparse behaviour.
if self._command is not None and self._command.session.allow_pager:
# NOTE(review): '\:' is not a recognised escape sequence; Python keeps the
# backslash literally, so the title renders as 'Help\: ...'. Confirm intent.
desc = 'Help\: %s' % '-'.join(self.prog.split())
pager_kwargs = self._command.get_pager_spec()
with paging.pager_redirect(desc, **pager_kwargs):
return super().print_help(*args, **kwargs)
else:
return super().print_help(*args, **kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_subparsers(self, prog=None, **kwargs):
""" Supplement a proper `prog` keyword argument for the subprocessor. The superclass technique for getting the `prog` value breaks because of our VT100 escape codes injected by `format_help`. """ |
if prog is None:
# Use a non-shellish help formatter to avoid vt100 codes.
f = argparse.HelpFormatter(prog=self.prog)
# Render a usage line from our own actions, then strip it down to the
# plain-text program string the subparsers action expects.
f.add_usage(self.usage, self._get_positional_actions(),
                    self._mutually_exclusive_groups, '')
prog = f.format_help().strip()
return super().add_subparsers(prog=prog, **kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _analyze(self):
""" Apply the filter to the log file """ |
# Partition parsed lines: entries whose IP is in the noisy-IP filter go to
# noisy_logs, everything else (including IP-less lines) goes to quiet_logs.
for entry in self.parsed_lines:
    if 'ip' in entry and entry['ip'] in self.filter['ips']:
        self.noisy_logs.append(entry)
    else:
        self.quiet_logs.append(entry)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _extract_features(self):
""" Get the feature data from the log file necessary for a reduction """ |
# Collect the distinct IPs seen in the log into self.features['ips'].
for line in self.parsed_lines:
    record = {'raw': line}
    if 'ip' in line:
        ip = line['ip']
        record['ip'] = ip
        # Preserve first-seen order; a list keeps duplicates out manually.
        if ip not in self.features['ips']:
            self.features['ips'].append(ip)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_local_ip():
""" Get the local ip of this device :return: Ip of this computer :rtype: str """ |
return set([x[4][0] for x in socket.getaddrinfo(
socket.gethostname(),
80,
socket.AF_INET
)]).pop() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _init_listen_socket(self):
""" Init listen socket :rtype: None """ |
# Trace entry for debugging.
self.debug("()")
# UDP socket; SO_REUSEADDR lets the port be rebound quickly after restart.
self._listen_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._listen_socket.setsockopt(
socket.SOL_SOCKET,
socket.SO_REUSEADDR,
1
)
self._listen_socket.bind((self._listen_ip, self._listen_port))
# Register with the poll/select list so the read loop sees this socket.
self._listening.append(self._listen_socket) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _shutdown_listen_socket(self):
""" Shutdown listening socket :rtype: None """ |
# Trace entry for debugging.
self.debug("()")
# Deregister from the poll/select list first so the read loop stops
# touching the socket before it gets closed.
if self._listen_socket in self._listening:
self._listening.remove(self._listen_socket)
if self._listen_socket:
self._listen_socket.close()
# NOTE(review): original indentation is ambiguous here; this reset looks
# intended to run regardless of whether a socket was open -- confirm.
self._listen_socket = None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send(self, ip, port, data):
""" Send an UDP message :param ip: Ip to send to :type ip: str :param port: Port to send to :type port: int :return: Number of bytes sent :rtype: int """ |
# Single datagram send on the bound listen socket; returns bytes sent.
return self._listen_socket.sendto(data, (ip, port)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _send_ack(self, ip, port, packet, update_timestamp=True):
""" Send an ack packet :param ip: Ip to send to :type ip: str :param port: Port to send to :type port: int :param packet: Packet to be acknowledged :type packet: APPMessage :param update_timestamp: Should update timestamp to current :type update_timestamp: bool :rtype: None """ |
# TODO: maybe wait a bit, so the ack could get attached to another
# packet
# Build a bare ACK that echoes the peer's sequence number back.
ack = APPMessage(message_type=MsgType.ACK)
ack.header.ack_sequence_number = packet.header.sequence_number
# acknowledge_packet=False: an ACK must not itself demand an ACK, or the
# two peers would ping-pong acknowledgements forever.
self._send_packet(
ip, port, ack,
update_timestamp=update_timestamp, acknowledge_packet=False
) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_packet(self, socket):
""" Read packet and put it into inbox :param socket: Socket to read from :type socket: socket.socket :return: Read packet :rtype: APPMessage """ |
# Blocking read of one datagram; _buffer_size caps the datagram size.
data, (ip, port) = socket.recvfrom(self._buffer_size)
# NOTE(review): any bytes in `remainder` beyond the first message are
# discarded here -- confirm that is intended.
packet, remainder = self._unpack(data)
self.inbox.put((ip, port, packet))
# Wake any consumer waiting for inbox activity.
self.new_packet.set()
self.debug(u"RX: {}".format(packet))
if packet.header.sequence_number is not None:
# Packet needs to be acknowledged
self._send_ack(ip, port, packet)
ack_seq = packet.header.ack_sequence_number
if ack_seq is not None:
# Packet got acknowledged
with self._seq_ack_lock:
if ack_seq in self._seq_ack:
self.debug(u"Seq {} got acked".format(ack_seq))
self._seq_ack.remove(ack_seq)
return packet |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _acking(self, params=None):
""" Packet acknowledge and retry loop :param params: Ignore :type params: None :rtype: None """ |
# Retransmit loop: entries in _to_ack are (due_time, attempt, addr, packet).
while self._is_running:
try:
t, num_try, (ip, port), packet = self._to_ack.get(
timeout=self._select_timeout
)
except queue.Empty:
# Timed out
continue
# Sleep until the scheduled retransmit time, if it is still in the future.
diff = t - time.time()
if diff > 0:
time.sleep(diff)
with self._seq_ack_lock:
if packet.header.sequence_number not in self._seq_ack:
# Not waiting for this?
continue
if num_try <= self._retransmit_max_tries:
# Try again
self._send(ip, port, packet.pack(True))
# Requeue with exponential-free fixed backoff and bumped attempt count.
self._to_ack.put(
(
time.time() + self._retransmit_timeout,
num_try + 1,
(ip, port),
packet
)
)
else:
# Failed to ack
with self._seq_ack_lock:
try:
# NOTE(review): catching KeyError assumes _seq_ack is a set; a list
# would raise ValueError instead -- confirm the container type.
self._seq_ack.remove(packet.header.sequence_number)
except KeyError:
pass
self.warning("Exceeded max tries") |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_request_args(method, **kwargs):
"""Use `method` and other settings to produce a flickr API arguments. Here also use json as the return type. :param method: The method provided by flickr, ex: flickr.photosets.getPhotos :type method: str :param kwargs: Other settings :type kwargs: dict :return: An argument list used for post request :rtype: list of sets """ |
args = [
('api_key', api_key),
('format', 'json'),
('method', method),
('nojsoncallback', '1'),
]
if kwargs:
for key, value in kwargs.iteritems():
args.append((key, value))
args.sort(key=lambda tup: tup[0])
api_sig = _get_api_sig(args)
args.append(api_sig)
return args |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_api_sig(args):
"""Flickr API need a hash string which made using post arguments :param args: Arguments of the flickr request :type args: list of sets :return: api_sig, ex: ('api_sig', 'abcdefg') :rtype: tuple """ |
tmp_sig = api_secret
for i in args:
tmp_sig = tmp_sig + i[0] + i[1]
api_sig = hashlib.md5(tmp_sig.encode('utf-8')).hexdigest()
return 'api_sig', api_sig |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_dir(path):
"""Create dir with the path :param path: The path to be created :type path: str """ |
# Create the directory if missing; abort the program when the path exists
# but is not a directory. (Removed the original's dead `else: pass` branch.)
if not os.path.exists(path):
    os.makedirs(path)
    logger.info('Create dir: %s', path)
elif not os.path.isdir(path):
    # Something non-directory occupies the path: unrecoverable for a
    # downloader, so exit with an error status.
    logger.error('%s is not a directory', path)
    sys.exit(1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_photos_info(photoset_id):
"""Request the photos information with the photoset id :param photoset_id: The photoset id of flickr :type photoset_id: str :return: photos information :rtype: list """ |
# Build the signed argument list for the photosets.getPhotos call.
args = _get_request_args(
'flickr.photosets.getPhotos',
photoset_id=photoset_id
)
resp = requests.post(API_URL, data=args)
resp_json = json.loads(resp.text.encode('utf-8'))
logger.debug(resp_json)
# NOTE(review): no error handling -- an invalid photoset id will surface as
# a KeyError here rather than a friendly message. Confirm acceptable.
photos = resp_json['photoset']['photo']
return photos |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def single_download_photos(photos):
"""Use single process to download photos :param photos: The photos to be downloaded :type photos: list of dicts """ |
# `counter` is shared module state; presumably download_photo decrements it
# for progress reporting -- confirm against its definition.
global counter
counter = len(photos)
for photo in photos:
download_photo(photo) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def multithread_download_photos(photos):
"""Use multiple threads to download photos :param photos: The photos to be downloaded :type photos: list of dicts """ |
# Imported lazily so the module still loads where concurrent.futures is
# unavailable and another strategy is used.
from concurrent import futures
# `counter` is shared module state used by download_photo for progress.
global counter
counter = len(photos)
# Pool sized to CPU count; adequate for these I/O-bound downloads.
cpu_num = multiprocessing.cpu_count()
with futures.ThreadPoolExecutor(max_workers=cpu_num) as executor:
for photo in photos:
executor.submit(download_photo, photo) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_logger():
"""Initialize the logger and set its format """ |
# Attach a stdout handler that emits "LEVEL: message" lines at INFO level.
console = logging.StreamHandler(stream=sys.stdout)
console.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
console.setLevel(logging.INFO)
logger.addHandler(console)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _gevent_patch():
"""Patch the modules with gevent :return: Default is GEVENT. If it not supports gevent then return MULTITHREAD :rtype: int """ |
try:
assert gevent
assert grequests
except NameError:
logger.warn('gevent not exist, fallback to multiprocess...')
return MULTITHREAD
else:
monkey.patch_all() # Must patch before get_photos_info
return GEVENT |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
"""The main procedure """ |
# Wire up logging before anything can emit output.
init_logger()
args = _parse_cli_args()
# -u: (re)enter the API key interactively, then quit.
if args.u:
enter_api_key()
return
# Gevent is optional; _gevent_patch downgrades the strategy when missing.
if args.O == GEVENT:
args.O = _gevent_patch()
set_image_size_mode(args.s)
photoset_id = args.g
# Module-level `directory` is read by the download helpers.
global directory
directory = args.d if args.d else photoset_id
read_config()
photos = get_photos_info(photoset_id)
create_dir(directory)
# Dispatch on the chosen download strategy.
if args.O == SINGLE_PROCESS:
single_download_photos(photos)
elif args.O == GEVENT:
event_download_photos(photos)
elif args.O == MULTITHREAD:
multithread_download_photos(photos)
else:
logger.error('Unknown Error') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_cjk_punctuation(char):
"""Returns true if char is a punctuation mark in a CJK language.""" |
lower = int('0x3000', 16)
higher = int('0x300F', 16)
return ord(char) >= lower and ord(char) <= higher |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def force_populate(self):
""" Populates the parser with the entire contents of the word reference file. """ |
# Guard: the reference file must exist before we can load it.
if not os.path.exists(self.ref):
    raise FileNotFoundError("The reference file path '{}' does not exists.".format(self.ref))
# Load every word (one per line) into the backing store.
with open(self.ref, 'r') as handle:
    for line in handle:
        self.db.add(line.strip('\n'))
self.populated = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def leveled(self):
"""Return all countries with a level set""" |
# Compatibility support for Django<1.6
safe_get_queryset = (self.get_query_set if hasattr(self, 'get_query_set') else self.get_queryset)
return safe_get_queryset.exclude(level=0) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def popd(pop_all=False, throw_if_dir_invalid=True):
"""Restore current working directory to previous directory. The previous directory is whatever it was when last :py:meth:`pushd()` was *last* called. :py:meth:`pushd()` creates a stack, so each call to popd() simply sets the CWD back to what it was on the prior pushd() call. Parameters pop_all : bool, optional When `pop_all` is True, sets the CWD to the state when pushd() was first called. Does NOT call os.getcwd() for intervening paths, only the final path. throw_if_dir_invalid : bool, optional Whether or not to pass back up any exception raised by chdir(). Default is True. Returns ------- True : bool Success False : bool Failure Raises ------ OSError If `throw_if_dir_invalid` is True and chdir raises an exception, this function will chain the same exception as chdir, typically OSError ValueError If popd() called on an empty stack; i.e. before :py:meth:`pushd()` has been called. Notes ----- This method and its counterpart :py:meth:`pushd` are **not** thread safe! """ |
global _pushdstack
from os import chdir
if len(_pushdstack) == 0:
raise ValueError("popd() called on an empty stack.")
if pop_all:
while( len(_pushdstack) > 1):
_pushdstack.pop()
try:
chdir(_pushdstack.pop())
err = 0
except OSError:
if throw_if_dir_invalid:
raise
err = 1
return err == 0 |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pyVersionStr(self):
"""Version of Python running my script Returns ------- str A descriptive string containing the version of Python running this script. """ |
from sys import version_info
return "Python Interpreter Version: {}.{}.{}".format(version_info.major,
version_info.minor,
version_info.micro) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def all(self):
"""Returns a tuple containing all elements of the object This method returns all elements of the path in the form of a tuple. e.g.: `(abs_path, drive_letter, path_only, rootname, extension, filesize, time_in_seconds)`. Returns ------- tuple All elements of the path associated with this object as a tuple. Notes ----- If path points to a non-existant file, the size and datetime will be returned as None (NoneType). """ |
# Tuple order: (abs_path, drive, path, rootname, extension, size, mtime);
# size and time are None when the path does not exist.
return (self._full, self._driv, self._path, self._name, self._ext, self._size, self._time) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def format(self, fmt):
"""Returns string representing the items specified in the format string The format string can contain: .. code:: d - drive letter p - path n - name x - extension z - file size t - file time in seconds And, you can string them together, e.g. `dpnx` returns the fully qualified name. On platforms like Unix, where drive letter doesn't make sense, it's simply ignored when used in a format string, making it easy to construct fully qualified path names in an os independent manner. Parameters fmt : str A string representing the elements you want returned. Returns ------- str A string containing the elements of the path requested in `fmt` """ |
val = ''
for x in fmt:
if x == 'd':
val += self._driv
elif x == 'p':
val += self._path
elif x == 'n':
val += self._name
elif x == 'x':
val += self._ext
elif x == 'z':
if self._size != None: val += str(self._size)
elif x == 't':
if self._time != None: val += str(self._time)
return val |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cached(attr):
""" In-memory caching for a nullary callable. """ |
def decorator(f):
@functools.wraps(f)
def decorated(self):
try:
return getattr(self, attr)
except AttributeError:
value = f(self)
setattr(self, attr, value)
return value
return decorated
return decorator |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_cell_type(cell, cell_type):
    '''
    Checks the cell type to see if it represents the cell_type passed in.

    Args:
        cell: The cell value to test.
        cell_type: The type id for a cell match, or None/NoneType for an
            "empty" match (None or a blank string).

    Returns:
        bool: True when the cell matches the requested type.
    '''
    # PEP 8 fix: identity comparison for None/NoneType instead of `==`.
    if cell_type is None or cell_type is type(None):
        # NOTE(review): ``basestring`` is Python-2-only; this module appears
        # to target Python 2 -- confirm before porting.
        return cell is None or (isinstance(cell, basestring) and not cell)
    else:
        return isinstance(cell, cell_type)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def auto_convert_cell_no_flags(cell, units=None, parens_as_neg=True):
'''
Performs a first step conversion of the cell to check
it's type or try to convert if a valid conversion exists.
This version of conversion doesn't flag changes nor store
cell units.
Args:
units: The dictionary holder for cell units.
parens_as_neg: Converts numerics surrounded by parens to
negative values
'''
# Delegate to auto_convert_cell with a throwaway Flagable and an empty
# flags dict, so change-flags are collected and then discarded.
units = units if units != None else {}
return auto_convert_cell(flagable=Flagable(), cell=cell, position=None, worksheet=0,
                         flags={}, units=units, parens_as_neg=parens_as_neg) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def auto_convert_cell(flagable, cell, position, worksheet, flags, units, parens_as_neg=True):
'''
Performs a first step conversion of the cell to check
it's type or try to convert if a valid conversion exists.
Args:
parens_as_neg: Converts numerics surrounded by parens to negative values
'''
conversion = cell
# Is an numeric?
if isinstance(cell, (int, float)):
pass
# Is a string?
# NOTE(review): ``basestring`` makes this Python-2-only code.
elif isinstance(cell, basestring):
# Blank cell?
if not cell:
conversion = None
else:
# Non-blank strings go through the string-specific converter.
conversion = auto_convert_string_cell(flagable, cell, position, worksheet,
flags, units, parens_as_neg=parens_as_neg)
# Is something else?? Convert to string
elif cell != None:
# Since we shouldn't get this event from most file types,
# make this a warning level conversion flag
flagable.flag_change(flags, 'warning', position, worksheet,
flagable.FLAGS['unknown-to-string'])
conversion = str(cell)
# Empty cell?
if not conversion:
conversion = None
else:
# Otherwise we have an empty cell
pass
return conversion |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def auto_convert_string_cell(flagable, cell_str, position, worksheet, flags,
                             units, parens_as_neg=True):
'''
Handles the string case of cell and attempts auto-conversion
for auto_convert_cell.
Args:
parens_as_neg: Converts numerics surrounded by parens to negative values
'''
conversion = cell_str.strip()
# Wrapped?
if re.search(allregex.control_wrapping_regex, cell_str):
# Drop the wrapping characters
stripped_cell = cell_str.strip()
mod_cell_str = stripped_cell[1:][:-1].strip()
neg_mult = False
# If the wrapping characters are '(' and ')' and the interior is a number,
# then the number should be interpreted as a negative value
if (stripped_cell[0] == '(' and stripped_cell[-1] == ')' and
            re.search(allregex.contains_numerical_regex, mod_cell_str)):
# Flag for conversion to negative
neg_mult = True
flagable.flag_change(flags, 'interpreted', position, worksheet,
                     flagable.FLAGS['removed-wrapping'])
# Try again without wrapping
converted_value = auto_convert_cell(flagable, mod_cell_str, position,
                                    worksheet, flags, units)
# Negate only when the unwrapped value really converted to a numeric.
neg_mult = neg_mult and check_cell_type(converted_value, get_cell_type(0))
if neg_mult and parens_as_neg:
flagable.flag_change(flags, 'interpreted', position, worksheet,
                     flagable.FLAGS['converted-wrapping-to-neg'])
return -converted_value if neg_mult else converted_value
# Is a string containing numbers?
elif re.search(allregex.contains_numerical_regex, cell_str):
conversion = auto_convert_numeric_string_cell(flagable, conversion, position,
                                              worksheet, flags, units)
# Booleans are normalised to 1/0.
elif re.search(allregex.bool_regex, cell_str):
flagable.flag_change(flags, 'interpreted', position, worksheet,
                     flagable.FLAGS['bool-to-int'])
conversion = 1 if re.search(allregex.true_bool_regex, cell_str) else 0
return conversion |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set(self, field, value):
""" Sets the value of an app field. :param str field: The name of the app field. Trying to set immutable fields ``uuid`` or ``key`` will raise a ValueError. :param value: The new value of the app field. :raises: ValueError """ |
# Guard clauses: the two immutable fields fail fast with ValueError.
if field == 'uuid':
    raise ValueError('uuid cannot be set')
if field == 'key':
    raise ValueError(
        'key cannot be set. Use \'reset_key\' method')
# Everything else is a plain assignment into the backing dict.
self.data[field] = value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset_key(self):
""" Resets the app's key on the `unicore.hub` server. :returns: str -- the new key """ |
# Ask the hub server to rotate the key, then mirror it locally so this
# object stays in sync with the server state.
new_key = self.client.reset_app_key(self.get('uuid'))
self.data['key'] = new_key
return new_key |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def raise_exception(self, original_exception=None):
"""Raise a retry exception if under the max retries. After, raise the original_exception provided to this method or a generic Exception if none provided. """ |
# Retries exhausted: surface the caller's exception (or a generic one).
if self._executed_retries >= self._max_retries:
    raise original_exception or Exception()
# Otherwise schedule another retry with doubled (exponential) backoff;
# the exception carries the backoff that applied *before* doubling.
backoff_now = self._ms_backoff
self._executed_retries += 1
self._ms_backoff *= 2
raise ActionRetryException(backoff_now)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
# pragma: nocover """Print checksum and file name for all files in the directory. """ |
p = argparse.ArgumentParser(add_help="Recursively list interesting files.")
p.add_argument(
    'directory', nargs="?", default="",
    help="The directory to process (current dir if omitted)."
)
p.add_argument(
    '--verbose', '-v', action='store_true',
    help="Increase verbosity."
)
args = p.parse_args()
args.curdir = os.getcwd()
if not args.directory:
    # BUG FIX: this was misspelled ``args.direcotry``, so the current-dir
    # default never actually reached list_files() below.
    args.directory = args.curdir
if args.verbose:
    print(args)
# Emit "checksum filename" pairs, one per line.
for chsm, fname in list_files(args.directory):
    print(chsm, fname)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process_exists(pid=None):
""" Evaluates a Pid Value defaults to the currently foucsed window against the current open programs, if there is a match returns the process name and pid otherwise returns None, None """ |
# Default to the focused window's pid; a callable pid is invoked lazily.
if not pid:
pid = current_pid()
elif callable(pid):
pid = pid()
if pid and psutil.pid_exists(pid):
pname = psutil.Process(pid).name()
if os.name == 'nt':
# Windows process names carry an extension (e.g. '.exe'); strip it.
return os.path.splitext(pname)[0], pid
return pname, pid
# No such process: signal with a (None, None) pair.
return None, None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_active_window_pos():
'''screen coordinates massaged so that movewindow command works to
restore the window to the same position
returns x, y
'''
# http://stackoverflow.com/questions/26050788/in-bash-on-ubuntu-14-04-unity-how-can-i-get-the-total-size-of-an-open-window-i/26060527#26060527
cmd = ['xdotool','getactivewindow', 'getwindowgeometry']
res = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr= subprocess.PIPE).communicate()
stdout = res[0].decode('utf-8').splitlines()
# Line 1 holds "Position: x,y", line 2 holds "Geometry: WxH".
pos = stdout[1].split(':')[1].split(',')
geo = stdout[2].split(':')[1].split('x')
x, y = int(pos[0].strip()), int(pos[1].split('(')[0].strip())
w, h = int(geo[0].strip()), int(geo[1].strip())
# get the window decorations
window_id = get_window_id()
cmd = ['xprop', '_NET_FRAME_EXTENTS', '-id', window_id]
res = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr= subprocess.PIPE).communicate()
# _NET_FRAME_EXTENTS order per EWMH: left, right, top, bottom.
decos = res[0].decode('utf-8').split('=')[1].split(',')
l, r = int(decos[0].strip()), int(decos[1].strip())
t, b = int(decos[2].strip()), int(decos[3].strip())
# Subtract the decoration offsets so a later movewindow restores exactly.
return x-l, y-t |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_active_title():
'''returns the window title of the active window'''
# Only X11/xdotool is supported; other platforms raise.
if os.name == 'posix':
cmd = ['xdotool','getactivewindow','getwindowname']
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# NOTE(review): the title keeps xdotool's trailing newline -- confirm
# callers expect that.
title = proc.communicate()[0].decode('utf-8')
else:
raise NotImplementedError
return title |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_processes():
    '''Yield the names of processes owned by the current user.'''
    user = getpass.getuser()
    for proc in psutil.process_iter():
        # Skip processes belonging to other users.
        if proc.username() != user:
            continue
        pname = psutil.Process(proc.pid).name()
        if os.name == 'nt':
            # Strip the extension (e.g. '.exe'). ROBUSTNESS FIX: splitext
            # instead of the original blind ``pname[:-4]`` slice, and
            # consistent with how process_exists() does it.
            pname = os.path.splitext(pname)[0]
        yield pname
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def get_titles():
'''returns titles of all open windows'''
# Only X11/xdotool is supported; other platforms raise.
if os.name == 'posix':
for proc in get_processes():
# Search window ids whose name matches each user-owned process name.
cmd = ['xdotool','search','--name', proc]
# NOTE(review): `proc` is rebound from process name to Popen handle
# here, shadowing the loop variable -- works, but confusing.
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
window_ids = proc.communicate()[0].decode('utf-8')
if window_ids:
for window_id in window_ids.split('\n'):
cmd = ['xdotool','getwindowname',window_id]
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
title = proc.communicate()[0].decode('utf-8')
try:
# Drop xdotool's trailing newline; empty output is skipped via
# the IndexError from title[-1].
if title[-1] == '\n':
title = title[:-1]
yield title
except IndexError:
pass
else:
raise NotImplementedError |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_gcd(a, b):
    """Return greatest common divisor for a and b."""
    # Euclid's algorithm: repeatedly map (a, b) -> (b mod a, a) until the
    # first element reaches zero; the survivor is the gcd.
    x, y = a, b
    while x != 0:
        x, y = y % x, x
    return y
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def auto_tweet(sender, instance, *args, **kwargs):
""" Allows auto-tweeting newly created object to twitter on accounts configured in settings. You MUST create an app to allow oAuth authentication to work: -- https://dev.twitter.com/apps/ You also must set the app to "Read and Write" access level, and create an access token. Whew. """ |
if not twitter or getattr(settings, 'TWITTER_SETTINGS') is False:
#print 'WARNING: Twitter account not configured.'
return False
if not kwargs.get('created'):
return False
twitter_key = settings.TWITTER_SETTINGS
try:
api = twitter.Api(
consumer_key = twitter_key['consumer_key'],
consumer_secret = twitter_key['consumer_secret'],
access_token_key = twitter_key['access_token_key'],
access_token_secret = twitter_key['access_token_secret']
)
except Exception as error:
print("failed to authenticate: {}".format(error))
text = instance.text
if instance.link:
link = instance.link
else:
link = instance.get_absolute_url()
text = '{} {}'.format(text, link)
try:
api.PostUpdate(text)
except Exception as error:
print("Error posting to twitter: {}".format(error)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def postalCodeLookup(self, countryCode, postalCode):
""" Looks up locations for this country and postal code. """ |
params = {"country": countryCode, "postalcode": postalCode}
# Asynchronous call; the deferred fires with the parsed JSON response.
d = self._call("postalCodeLookupJSON", params)
# Narrow the response dict down to its "postalcodes" list.
d.addCallback(operator.itemgetter("postalcodes"))
return d |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _print(*args):
    """ Print txt by coding GBK. *args list, list of printing contents """
    # Debug-only console printer (Python 2 code: print statement, `unicode`,
    # and `sys.exc_traceback` are all Python-2-only).
    if not CFG.debug:
        return
    if not args:
        return
    encoding = 'gbk'
    # _cs presumably coerces each value to a byte string in `encoding`
    # -- TODO confirm against its definition.
    args = [_cs(a, encoding) for a in args]
    f_back = None
    # Raise-and-catch trick to grab the caller's frame so the message can be
    # prefixed with "[module.function]".
    try:
        raise Exception
    except:
        f_back = sys.exc_traceback.tb_frame.f_back
    f_name = f_back.f_code.co_name
    filename = os.path.basename(f_back.f_code.co_filename)
    m_name = os.path.splitext(filename)[0]
    # Left-justify the prefix to 20 columns so output lines up.
    prefix = ('[%s.%s]'%(m_name, f_name)).ljust(20, ' ')
    if os.name == 'nt':
        # On Windows, re-encode unicode values to GBK for the console.
        for i in range(len(args)):
            v = args [i]
            if isinstance(v, str):
                args[i] = v #v.decode('utf8').encode('gbk')
            elif isinstance(v, unicode):
                args[i] = v.encode('gbk')
    print '[%s]'%str(datetime.datetime.now()), prefix, ' '.join(args)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _print_err(*args):
    """ Print errors. *args list, list of printing contents """
    # Debug-only error printer (Python 2); prints in red via bcolors codes.
    if not CFG.debug:
        return
    if not args:
        return
    # Console encoding differs by platform: UTF-8 on POSIX, GBK elsewhere.
    encoding = 'utf8' if os.name == 'posix' else 'gbk'
    args = [_cs(a, encoding) for a in args]
    f_back = None
    # Raise-and-catch trick to obtain the caller's frame (Python 2 only:
    # sys.exc_traceback).
    try:
        raise Exception
    except:
        f_back = sys.exc_traceback.tb_frame.f_back
    f_name = f_back.f_code.co_name
    filename = os.path.basename(f_back.f_code.co_filename)
    m_name = os.path.splitext(filename)[0]
    # "[module.function]" prefix padded to 20 columns.
    prefix = ('[%s.%s]'%(m_name, f_name)).ljust(20, ' ')
    print bcolors.FAIL+'[%s]'%str(datetime.datetime.now()), prefix, ' '.join(args) + bcolors.ENDC
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fileprint(filename, category, level=logging.DEBUG, maxBytes=1024*10124*100, backupCount=0):
    """Return a (print_function, logger) pair that logs to a size-rotated file.

    filename    -- log file name (also used as the logger name)
    category    -- sub-directory of the log directory (CFG.filedir)
    level       -- logging level threshold for the returned logger
    maxBytes    -- rotation threshold in bytes.
                   NOTE(review): 1024*10124*100 looks like a typo for
                   1024*1024*100 (100 MiB); kept as-is to preserve behavior.
    backupCount -- number of rotated copies to keep
    """
    path = os.path.join(CFG.filedir, category, filename)
    filer = logging.getLogger(filename)
    frt = logging.Formatter('%(message)s')
    # Handlers are tagged with a private "_name" marker so repeated calls with
    # the same filename don't stack duplicate handlers on the shared logger.
    # getattr() is used so foreign handlers without the marker don't crash us.
    hdr = RotatingFileHandler(path, 'a', maxBytes, backupCount, 'utf-8')
    hdr.setFormatter(frt)
    hdr._name = '##_rfh_##'
    if not any(getattr(h, '_name', None) == '##_rfh_##' for h in filer.handlers):
        filer.addHandler(hdr)
    # Same de-duplication for the stdout echo handler (the original second
    # scan loop was also missing its `break`; any() short-circuits instead).
    hdr = logging.StreamHandler(sys.stdout)
    hdr.setFormatter(frt)
    hdr._name = '##_sh_##'
    if not any(getattr(h, '_name', None) == '##_sh_##' for h in filer.handlers):
        filer.addHandler(hdr)
    filer.setLevel(level)
    def _wraper(*args):
        # Join all arguments (converted via _cu to the platform encoding)
        # into a single log record.
        if not args:
            return
        encoding = 'utf8' if os.name == 'posix' else 'gbk'
        args = [_cu(a, encoding) for a in args]
        filer.info(' '.join(args))
    return _wraper, filer
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pt_on_bezier_curve(P=[(0.0, 0.0)], t=0.5):
    '''Return point at t on bezier curve defined by control points P.
    '''
    assert isinstance(P, list)
    assert len(P) > 0
    for p in P:
        assert isinstance(p, tuple)
        for i in p:
            assert len(p) > 1
            assert isinstance(i, float)
    assert isinstance(t, float)
    assert 0 <= t <= 1
    # De Casteljau's algorithm: repeatedly interpolate between adjacent
    # control points at parameter t until a single point remains.
    pts = P
    while len(pts) > 1:
        pts = [pt_between_pts(a, b, t) for a, b in zip(pts, pts[1:])]
    assert len(pts) == 1
    return pts[0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pts_on_bezier_curve(P=[(0.0, 0.0)], n_seg=0):
    '''Return list N+1 points representing N line segments on bezier curve
    defined by control points P.
    '''
    assert isinstance(P, list)
    assert len(P) > 0
    for p in P:
        assert isinstance(p, tuple)
        for i in p:
            assert len(p) > 1
            assert isinstance(i, float)
    assert isinstance(n_seg, int)
    assert n_seg >= 0
    # Sample the curve at i/n_seg for i in [0, n_seg), then close with the
    # final control point (which is exactly the curve's endpoint at t=1).
    samples = [pt_on_bezier_curve(P, i / float(n_seg)) for i in range(n_seg)]
    samples.append(P[-1])
    return samples
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bezier_curve_approx_len(P=[(0.0, 0.0)]):
    '''Return approximate length of a bezier curve defined by control points P.
    Segment curve into N lines where N is the order of the curve, and accumulate
    the length of the segments.
    '''
    assert isinstance(P, list)
    assert len(P) > 0
    for p in P:
        assert isinstance(p, tuple)
        for i in p:
            assert len(p) > 1
            assert isinstance(i, float)
    n_seg = len(P) - 1
    pts = pts_on_bezier_curve(P, n_seg)
    # Sum chord lengths between consecutive sample points.
    return sum(distance_between_pts(a, b) for a, b in zip(pts, pts[1:]))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def engage(args, password):
    """ Construct payloads and POST to Red October """
    def base():
        # Credentials common to every Red October endpoint.
        return {'Name': args['--user'], 'Password': password}
    def data_arg():
        # Inline string data unless a file path was supplied.
        if args['--file'] is None:
            return args['--str']
        return read_file(args['--file'])
    if args['create']:
        goodquit_json(api_call('create', args, base()))
    elif args['delegate']:
        payload = dict(base(), Time=args['--time'], Uses=args['--uses'])
        goodquit_json(api_call('delegate', args, payload))
    elif args['encrypt']:
        payload = dict(base(),
                       Minimum=args['--min'],
                       Owners=args['--owners'].split(','),
                       Data=data_arg())
        goodquit_json(api_call('encrypt', args, payload))
    elif args['decrypt']:
        payload = dict(base(), Data=data_arg())
        goodquit_json(api_call('decrypt', args, payload))
    elif args['summary']:
        goodquit_json(api_call('summary', args, base()))
    elif args['change-password']:
        # Prompt for the new password interactively and remember it in args.
        args['newpass'] = getpass.getpass('New Password: ')
        payload = dict(base(), NewPassword=args['newpass'])
        goodquit_json(api_call('password', args, payload))
    elif args['modify']:
        payload = dict(base(),
                       Command=args['--action'],
                       ToModify=args['--target'])
        goodquit_json(api_call('modify', args, payload))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(self, data, filename='', debuglevel=0):
    """ Parse given data.
    data: A string containing the filter definition
    filename: Name of the file being parsed (for meaningful error messages)
    debuglevel: Debug level to yacc
    """
    self.lexer.filename = filename
    self.lexer.reset_lineno()
    # Only hand non-blank input to yacc; empty/whitespace input parses to [].
    if data and not data.isspace():
        return self.parser.parse(data, lexer=self.lexer, debug=debuglevel)
    return []
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_factor_rule(tok):
    """ Simple helper method for creating factor node objects based on node name. """
    kind, value = tok[0], tok[1]
    # Token name -> rule class; anything unrecognised becomes a constant.
    dispatch = {
        'IPV4':      IPV4Rule,
        'IPV6':      IPV6Rule,
        'DATETIME':  DatetimeRule,
        'TIMEDELTA': TimedeltaRule,
        'INTEGER':   IntegerRule,
        'FLOAT':     FloatRule,
        'VARIABLE':  VariableRule,
    }
    return dispatch.get(kind, ConstantRule)(value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unshorten_url(short_url):
    """Unshortens the short_url or returns None if not possible."""
    # Python 2 code (urllib2/httplib).  Results are memoised in the
    # UnshortenURL model so each short URL is resolved over the network at
    # most once.
    short_url = short_url.strip()
    if not short_url.startswith('http'):
        short_url = 'http://{0}'.format(short_url)
    try:
        cached_url = UnshortenURL.objects.get(short_url=short_url)
    except UnshortenURL.DoesNotExist:
        cached_url = UnshortenURL(short_url=short_url)
    else:
        # Cache hit: skip the network round-trip entirely.
        return cached_url.long_url
    try:
        resp = urllib2.urlopen(short_url)
    except (
            urllib2.HTTPError, urllib2.URLError,
            httplib.HTTPException):
        return None
    if resp.code == 200:
        # urlopen followed any redirects; resp.url is the final long URL.
        cached_url.long_url = resp.url
        cached_url.save()
        return resp.url
    # NOTE(review): non-200 responses fall through and return None implicitly
    # (and are not cached).
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def stop(self, spider_name=None):
    """Stop the named running spider, or the first spider found, if spider_name is None"""
    # An explicit argument overrides whatever was remembered previously.
    if spider_name is not None:
        self.spider_name = spider_name
    # Still nothing known? Fall back to the first running spider's name
    # (entries look like "project:spider").
    if self.spider_name is None:
        self.spider_name = self.list_running()[0].split(':')[-1]
    self.jsonrpc_call('crawler/engine', 'close_spider', self.spider_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def copy(self):
    '''
    makes a clone copy of the mapper. It won't clone the serializers or
    deserializers and it won't copy the events
    '''
    # Prefer the no-argument constructor; fall back to passing the parser
    # dict for mapper classes whose constructor requires it.
    try:
        clone = self.__class__()
    except Exception:
        clone = self.__class__(self._pdict)
    # Share (not deep-copy) the serializer/deserializer registries.
    clone._serializers = self._serializers
    clone.__deserializers = self.__deserializers
    return clone
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_required_fn(fn, root_path):
""" Definition of the MD5 file requires, that all paths will be absolute for the package directory, not for the filesystem. This function converts filesystem-absolute paths to package-absolute paths. Args: fn (str):
Local/absolute path to the file. root_path (str):
Local/absolute path to the package directory. Returns: str: Package-absolute path to the file. Raises: ValueError: When `fn` is absolute and `root_path` relative or \ conversely. """ |
if not fn.startswith(root_path):
raise ValueError("Both paths have to be absolute or local!")
replacer = "/" if root_path.endswith("/") else ""
return fn.replace(root_path, replacer, 1) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def partition(f, xs):
    """
    Works similar to filter, except it returns a two-item tuple where the
    first item is the sequence of items that passed the filter and the
    second is a sequence of items that didn't pass the filter
    """
    # Single pass, calling f once per item.  The previous implementation
    # relied on `filter()` returning a container and on `x not in true`
    # membership tests: on Python 3 `filter()` is a lazy iterator that the
    # `in` tests exhausted, so the "passed" result came back empty -- and the
    # membership approach misclassified items when f is not a pure function.
    t = type(xs)
    passed, failed = [], []
    for x in xs:
        (passed if f(x) else failed).append(x)
    # Preserve the input's container type (list, tuple, ...).
    return t(passed), t(failed)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lazy_binmap(f, xs):
    """
    Maps a binary function over a sequence. The function is applied to each item
    and the item after it until the last item is reached.
    """
    # Lazily yield f over each adjacent pair (xs[i], xs[i+1]).
    for left, right in zip(xs, xs[1:]):
        yield f(left, right)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lazy_reverse_binmap(f, xs):
    """
    Same as lazy_binmap, except the parameters are flipped for the binary function
    """
    # Each adjacent pair (xs[i], xs[i+1]) is passed as f(xs[i+1], xs[i]).
    for left, right in zip(xs, xs[1:]):
        yield f(right, left)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def analog_linear2_ramp(ramp_data, start_time, end_time, value_final, time_subarray):
    """Use this when you want a discontinuous jump at the end of the linear ramp."""
    # Interpolate between ramp_data's own start/end values; the value_final
    # argument (the next keyframe's value) is deliberately ignored, which is
    # what produces the discontinuous jump at the region boundary.
    v_start = ramp_data["value"]
    v_end = ramp_data["value_final"]
    frac = (time_subarray - start_time)/(end_time - start_time)
    return v_start*(1.0 - frac) + v_end*frac
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bake(self):
    """Find absolute times for all keys.

    Absolute time is stored in the KeyFrame dictionary as the variable
    __abs_time__.
    """
    # Start from a clean slate, then let get_absolute_time() memoise the
    # absolute time of every key (parents are resolved recursively).
    self.unbake()
    for key_name in self.dct:
        self.get_absolute_time(key_name)
    self.is_baked = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unbake(self):
    """Remove absolute times for all keys."""
    # Drop the cached '__abs_time__' entry from every keyframe, if present.
    for keyframe in self.dct.values():
        keyframe.pop('__abs_time__', None)
    self.is_baked = False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_absolute_time(self, key):
    """Returns the absolute time position of the key.

    If absolute time positions are not calculated, then this function
    calculates it (recursively, by adding the key's relative time to its
    parent's absolute time) and caches it under '__abs_time__'.
    """
    keyframe = self.dct[key]
    if '__abs_time__' not in keyframe:
        parent = keyframe['parent']
        if parent is None:
            # Root keys: relative time IS the absolute time.
            keyframe['__abs_time__'] = keyframe['time']
        else:
            keyframe['__abs_time__'] = (keyframe['time']
                                        + self.get_absolute_time(parent))
    return keyframe['__abs_time__']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sorted_key_list(self):
    """Returns list of keys sorted according to their absolute time."""
    # Absolute times must be cached before we can sort on them.
    if not self.is_baked:
        self.bake()
    return sorted(self.dct, key=lambda name: self.dct[name]['__abs_time__'])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_time(self, key_name, new_time):
    """Sets the time of key."""
    # Invalidate cached absolute times, update, then re-bake so every
    # descendant's absolute time is recomputed.
    self.unbake()
    self.dct[key_name]['time'] = new_time
    self.bake()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_comment(self, key_name, new_comment):
    """Sets the comment of key."""
    self.dct[key_name]['comment'] = new_comment
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_parent(self, key_name, new_parent):
    """Sets the parent of the key."""
    # Reparenting changes the key's absolute time, so invalidate and re-bake.
    self.unbake()
    self.dct[key_name]['parent'] = new_parent
    self.bake()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_ancestor(self, child_key_name, ancestor_key_name):
    """Returns True if ancestor lies in the ancestry tree of child."""
    # every key descends from None
    if ancestor_key_name is None:
        return True
    # Walk up the parent chain iteratively instead of recursing.
    current = child_key_name
    while True:
        parent = self.dct[current]['parent']
        # a key lies in its own ancestry tree (debatable but intended)
        if current == ancestor_key_name:
            return True
        if parent is None:
            return False
        current = parent
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_hook(self, key_name, hook_name, hook_dict):
    """Add hook to the keyframe key_name."""
    # Create the 'hooks' mapping on first use; hook names are stored as
    # strings.
    hooks = self.dct[key_name].setdefault('hooks', {})
    hooks[str(hook_name)] = hook_dict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_hook(self, key_name, hook_name):
    """Remove hook from the keyframe key_name.

    Returns the removed hook dict, or None if it was not present.
    """
    hooks = self.dct[key_name].get('hooks')
    if hooks and hook_name in hooks:
        return hooks.pop(hook_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def list_hooks(self, key_name):
    """Return list of all hooks attached to key_name."""
    # Always return a list, as the docstring promises.  The previous code
    # returned a Python-2-only dict.iterkeys() iterator when hooks existed
    # and a list otherwise, so callers saw two different types.
    kf = self.dct[key_name]
    if 'hooks' not in kf:
        return []
    return list(kf['hooks'])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_keyframes_overlap(self):
    """Checks for keyframs timing overlap.

    Returns the name of the first keyframs that overlapped, or None when all
    keyframes are separated in time.
    """
    skl = self.sorted_key_list()
    # Compare each keyframe's absolute time with the next one's.
    for name, nxt in zip(skl, skl[1:]):
        this_time = self.dct[name]['__abs_time__']
        next_time = self.dct[nxt]['__abs_time__']
        # Times closer than 1 us count as overlapping.
        if abs(next_time - this_time) < 1e-6:
            return name
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def del_unused_keyframes(self):
    """Scans through list of keyframes in the channel and removes those
    which are not in self.key_frame_list."""
    valid = set(self.key_frame_list.sorted_key_list())
    # Materialise the key list first: we delete from the dict while walking.
    for key in list(self.dct['keys']):
        if key not in valid:
            del self.dct['keys'][key]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_used_key_frames(self):
    """Returns a list of the keyframes used by this channel, sorted with
    time.

    Each element in the list is a tuple: the first element is the key_name
    and the second is the channel data at that keyframe.
    """
    keys = self.dct['keys']
    # Walk the time-sorted key names and keep only those this channel uses.
    return [(name, keys[name])
            for name in self.key_frame_list.sorted_key_list()
            if name in keys]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_ramp_regions(self):
    """Returns a numpy array where each element corresponds to whether to
    ramp in that region or jump."""
    skl = self.key_frame_list.sorted_key_list()
    # One slot per region between consecutive keyframes: 0 = jump, 1 = ramp.
    ramp_or_jump = np.zeros(len(skl) - 1)
    # NOTE(review): get_used_key_frame_list() is not defined in this chunk;
    # the code below treats its result as a flat list of key *names* (unlike
    # get_used_key_frames(), which returns (name, dict) tuples) -- confirm.
    used_key_frames = self.get_used_key_frame_list()
    for region_number, start_key in enumerate(skl[:-1]):
        if start_key in used_key_frames:
            key_data = self.dct['keys'][start_key]
            ramp_type = key_data['ramp_type']
            if ramp_type != "jump":
                # this means that a ramp starts in this region. Figure
                # out where it ends
                curr_key_num = used_key_frames.index(start_key)
                end_key_number = curr_key_num + 1
                # figure out if the current key was the last key
                if end_key_number < len(used_key_frames):
                    # if it wasnt, then find the end region
                    end_key_name = used_key_frames[end_key_number]
                    end_region_index = skl.index(end_key_name)
                    # mark every region from here up to the next used key
                    # as ramping
                    ramp_or_jump[region_number:end_region_index] = 1
    return ramp_or_jump
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_ramp(self, time_div=4e-3):
    """Returns the generated ramp and a time array.

    This function assumes a uniform time division throughout.
    time_div - time resolution of the ramp.
    """
    # Channel kind decides the output dtype and which ramp-function table
    # (analog_ramp_functions vs digital_ramp_functions) is used.
    if self.dct['type'] == 'analog':
        is_analog = True
    else:
        is_analog = False
    skl = self.key_frame_list.sorted_key_list()
    # each element in used_key_frames is a tuple (key_name, key_dict)
    used_key_frames = self.get_used_key_frames()
    # Pad by one division so the final keyframe's sample is included.
    max_time = self.key_frame_list.get_absolute_time(skl[-1]) + time_div
    num_points = int(round(max_time/time_div))
    time = np.arange(num_points) * time_div
    # time = np.arange(0.0, max_time, time_div)
    if is_analog:
        voltage = np.zeros(time.shape, dtype=float)
    else:
        voltage = np.zeros(time.shape, dtype='uint32')
    # Absolute keyframe times, then their positions on the sample grid.
    kf_times = np.array([self.key_frame_list.get_absolute_time(ukf[0])
                         for ukf in used_key_frames])
    kf_positions = kf_times/time_div
    # NOTE(review): kf_positions is a float array used directly as slice
    # bounds below -- relies on implicit index truncation; confirm intended.
    if is_analog:
        # set the start and the end part of the ramp
        start_voltage = used_key_frames[0][1]['ramp_data']['value']
        end_voltage = used_key_frames[-1][1]['ramp_data']['value']
        voltage[0:kf_positions[0]] = start_voltage
        voltage[kf_positions[-1]:] = end_voltage
    else:
        start_voltage = int(used_key_frames[0][1]['state'])
        end_voltage = int(used_key_frames[-1][1]['state'])
        voltage[0:kf_positions[0]] = start_voltage
        voltage[kf_positions[-1]:] = end_voltage
    # Fill each region between consecutive used keyframes with the samples
    # produced by that region's ramp function.
    for i in range(len(kf_times)-1):
        start_time = kf_times[i]
        end_time = kf_times[i+1]
        start_index = kf_positions[i]
        end_index = kf_positions[i+1]
        time_subarray = time[start_index:end_index]
        ramp_type = used_key_frames[i][1]['ramp_type']
        ramp_data = used_key_frames[i][1]['ramp_data']
        if is_analog:
            # Analog ramps interpolate toward the NEXT keyframe's value.
            value_final = used_key_frames[i+1][1]['ramp_data']['value']
        else:
            # Digital ramps only need the current on/off state.
            state = used_key_frames[i][1]['state']
        if is_analog:
            parms_tuple = (ramp_data, start_time, end_time, value_final,
                           time_subarray)
        else:
            parms_tuple = (ramp_data, start_time, end_time, state,
                           time_subarray)
        if is_analog:
            ramp_function = analog_ramp_functions[ramp_type]
        else:
            ramp_function = digital_ramp_functions[ramp_type]
        voltage_sub = ramp_function(*parms_tuple)
        voltage[start_index:end_index] = voltage_sub
    # finally use the conversion and return the voltage
    return time, self.convert_voltage(voltage, time)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def possible_forms(self):
    """ Generate a list of possible forms for the current lemma

    :returns: List of possible forms for the current lemma
    :rtype: [str]
    """
    # Collect into a set directly so duplicates are dropped as we go.
    forms = set()
    for morph in self.modele().morphos():
        for desinence in self.modele().desinences(morph):
            radicaux = self.radical(desinence.numRad())
            # radical() may return one Radical or a collection of them.
            if isinstance(radicaux, Radical):
                radicaux = [radicaux]
            for rad in radicaux:
                forms.add(rad.gr() + desinence.gr())
    return list(forms)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_moves():
    """Visit Bulbapedia and pull names and descriptions from the table,
    'list of moves.' Save as JSON."""
    # Network + third-party dependencies (requests, bs4): scrapes the wiki
    # page on every call.
    page = requests.get('http://bulbapedia.bulbagarden.net/wiki/List_of_moves')
    # NOTE(review): no explicit parser argument -- bs4 picks a default and
    # emits a warning on recent versions.
    soup = bs4.BeautifulSoup(page.text)
    # The moves table is nested one table deep inside the outer layout table.
    table = soup.table.table
    # Drop bare newline text nodes between rows and skip the header row.
    tablerows = [tr for tr in table.children if tr != '\n'][1:]
    moves = {}
    for tr in tablerows:
        cells = tr.find_all('td')
        # Normalise the name: strip footnote '*' and whitespace, lowercase,
        # and turn spaces into hyphens (slug form used as the dict key).
        move_name = cells[1].get_text().strip(' \n*').replace(' ', '-').lower()
        move_id = int(cells[0].get_text().strip())
        move_type = cells[2].get_text().strip()
        move_ps = cells[3].get_text().strip()
        moves[move_name] = {'id':move_id, 'type':move_type, 'ps':move_ps}
    # Write moves.json next to this source file so the package ships its data.
    srcpath = path.dirname(__file__)
    with io.open(path.join(srcpath, 'moves.json'), 'w', encoding='utf-8') as f:
        f.write(json.dumps(moves, ensure_ascii=False))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_from_path(path, filetype=None, has_filetype=True):
    """ load file content from a file specified as dot-separated

    The file is located according to logic in normalize_path, and the
    contents are returned.

    Parameters: (see normalize_path)
    path - dot-separated path
    filetype - optional filetype
    has_filetype - if True, treat last dot-delimited token as filetype

    Notes:
    1. If path is a file-like object, data is read directly from it.
    2. Non-string paths are returned immediately (excluding Note 1).
    """
    if not isinstance(path, str):
        # File-like objects are read directly; anything else is passed back.
        try:
            return path.read()
        except AttributeError:
            return path
    resolved = normalize_path(path, filetype, has_filetype)
    with open(resolved) as handle:
        return handle.read()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_lines_from_path(path, filetype=None, has_filetype=True):
    """ load lines from a file specified as dot-separated

    The file is located according to logic in normalize_path, and a list of
    lines is returned.

    Parameters: (see normalize_path)
    path - dot-separated path
    filetype - optional filetype
    has_filetype - if True, treat last dot-delimited token as filetype

    Notes:
    1. If path is a file-like object, lines are read directly from it.
    2. Non-string paths are returned immediately (excluding Note 1).
    """
    if not isinstance(path, str):
        # File-like objects are read directly; anything else is passed back.
        try:
            return path.readlines()
        except AttributeError:
            return path
    # Forward has_filetype to normalize_path -- the previous call dropped it,
    # silently ignoring the caller's flag (load_from_path passes all three).
    path = normalize_path(path, filetype, has_filetype)
    with open(path) as data:
        return data.readlines()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self):
    """Pickle the addressbook and a timestamp """
    if self.contacts:  # never write an empty addressbook
        cache = {'contacts': self.contacts,
                 'aadbook_cache': CACHE_FORMAT_VERSION}
        # Use a context manager so the file handle is flushed and closed;
        # the previous pickle.dump(..., open(...)) leaked the handle.
        with open(self._config.cache_filename, 'wb') as cache_file:
            pickle.dump(cache, cache_file)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_intercom_data(self):
    """Specify the user data sent to Intercom API"""
    # Intercom expects Unix timestamps as strings; strftime("%s") provides
    # them on POSIX platforms.  Users who never logged in send "".
    last_request = self.last_login.strftime("%s") if self.last_login else ""
    return {
        "user_id": self.intercom_id,
        "email": self.email,
        "name": self.get_full_name(),
        "last_request_at": last_request,
        "created_at": self.date_joined.strftime("%s"),
        "custom_attributes": {
            "is_admin": self.is_superuser,
        },
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_list(file, fmt):
    '''makes a list out of the fmt from the LspOutput f using the format
    i for int
    f for float
    d for double
    s for string'''
    # Format character -> reader function ('f' and 'd' share get_float).
    readers = {'i': get_int, 'f': get_float, 'd': get_float, 's': get_str}
    out = []
    for flag in fmt:
        try:
            reader = readers[flag]
        except KeyError:
            raise ValueError("Unexpected flag '{}'".format(flag))
        out.append(reader(file))
    return out
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flds_firstsort(d):
    '''
    Perform a lexsort and return the sort indices and shape as a tuple.
    '''
    # Grid shape: number of distinct coordinates along each axis.
    shape = [np.unique(d[axis]).size for axis in ('xs', 'ys', 'zs')]
    # lexsort's LAST key is primary, so this orders by x, then y, then z.
    return np.lexsort((d['z'], d['y'], d['x'])), shape
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flds_sort(d, s):
    '''
    Sort based on position. Sort with s as a tuple of the sort
    indices and shape from first sort.

    Parameters:
    -----------
    d -- the flds/sclr data
    s -- (si, shape) sorting and shaping data from firstsort.
    '''
    si, shape = s
    # Metadata keys that carry no per-point field data are left untouched.
    meta = ('t', 'xs', 'ys', 'zs', 'fd', 'sd')
    for label in [k for k in d.keys() if k not in meta]:
        # Reorder by the sort indices, lay out on the grid, and drop any
        # singleton dimensions.
        d[label] = np.squeeze(d[label][si].reshape(shape))
    return d
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(fname,**kw):
    '''
    Reads an lsp output file and returns a raw dump of data,
    sectioned into quantities either as an dictionary or a typed numpy array.
    Parameters:
    -----------
    fname -- filename of thing to read
    Keyword Arguments:
    ------------------
    vprint -- Verbose printer. Used in scripts
    override -- (type, start) => A tuple of a dump type and a place to start
                in the passed file, useful to attempting to read semicorrupted
                files.
    gzip -- Read as a gzip file.
    flds/sclr Specific Arguments:
    -----------------------------
    var -- list of quantities to be read. For fields, this can consist
           of strings that include vector components, e.g., 'Ex'. If
           None (default), read all quantities.
    keep_edges -- If set to truthy, then don't remove the edges from domains before
                  concatenation and don't reshape the flds data.
    sort -- If not None, sort using these indices, useful for avoiding
            resorting. If True and not an ndarray, just sort.
    first_sort -- If truthy, sort, and return the sort data for future flds
                  that should have the same shape.
    keep_xs -- Keep the xs's, that is, the grid information. Usually redundant
               with x,y,z returned.
    return_array -- If set to truthy, then try to return a numpy array with a dtype.
                    Requires of course that the quantities have the same shape.
    '''
    # gzip='guess' means: infer gzip-ness from the ".gz" extension.
    if test(kw,'gzip') and kw['gzip'] == 'guess':
        kw['gzip'] = re.search(r'\.gz$', fname) is not None;
    openf = gzip.open if test(kw, 'gzip') else open;
    with openf(fname,'rb') as file:
        if test(kw,'override'):
            # Caller forces a dump type and byte offset -- used to salvage
            # partially corrupted files whose header can't be parsed.
            dump, start = kw['override'];
            file.seek(start);
            header = {'dump_type': dump};
            # Forced flds/sclr reads have no header quantity list, so the
            # caller must supply 'var' explicitly.
            if not test(kw, 'var') and 2 <= header['dump_type'] <= 3 :
                raise ValueError(
                    "If you want to force to read as a scalar, you need to supply the quantities"
                );
        else:
            header = get_header(file);
        vprint = kw['vprint'] if test(kw, 'vprint') else lambda s: None;
        # Dump types 2 (flds) and 3 (sclr) share their option handling.
        if 2 <= header['dump_type'] <= 3 :
            if not test(kw, 'var'):
                # Default: read every quantity listed in the header.
                var=[i[0] for i in header['quantities']];
            else:
                var=kw['var'];
        keep_edges = test(kw, 'keep_edges');
        first_sort = test(kw, 'first_sort');
        if test(kw,'sort'):
            sort = kw['sort']
        else:
            sort = None;
        keep_xs = test(kw, 'keep_xs');
        return_array = test(kw, 'return_array');
        # Dispatch on the dump type found in (or forced into) the header;
        # the lambdas close over the open file object and parsed options.
        readers = {
            1: lambda: read_particles(file, header),
            2: lambda: read_flds(
                file,header,var,vprint,
                keep_edges=keep_edges,
                first_sort=first_sort,
                sort=sort,
                keep_xs=keep_xs,
                return_array=return_array),
            3: lambda: read_flds(
                file,header,var, vprint,
                keep_edges=keep_edges,
                first_sort=first_sort,
                sort=sort,
                keep_xs=keep_xs,
                return_array=return_array,
                vector=False),
            6: lambda: read_movie(file, header),
            10:lambda: read_pext(file,header)
        };
        try:
            d = readers[header['dump_type']]();
        except KeyError:
            raise NotImplementedError("Other file types not implemented yet!");
    return d;
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sigterm_handler(signum, stack_frame):
    """ Just tell the server to exit.

    WARNING: There are race conditions, for example with TimeoutSocket.accept.
    We don't care: the user can just rekill the process after like 1 sec. if
    the first kill did not work.
    """
    # pylint: disable-msg=W0613
    global _KILLED
    # Run each registered command's at_stop callback before shutting down
    # (Python 2 code: dict.iteritems).
    for name, cmd in _COMMANDS.iteritems():
        if cmd.at_stop:
            LOG.info("at_stop: %r", name)
            cmd.at_stop()
    _KILLED = True
    if _HTTP_SERVER:
        # Stop the serve loop, then release the listening socket.
        _HTTP_SERVER.kill()
        _HTTP_SERVER.server_close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(options, http_req_handler = HttpReqHandler):
    """ Start and execute the server """
    # pylint: disable-msg=W0613
    global _HTTP_SERVER
    # Allow configured banner strings to override the handler class defaults.
    for x in ('server_version', 'sys_version'):
        if _OPTIONS.get(x) is not None:
            setattr(http_req_handler, x, _OPTIONS[x])
    _HTTP_SERVER = threading_tcp_server.KillableThreadingHTTPServer(
        _OPTIONS,
        (_OPTIONS['listen_addr'], _OPTIONS['listen_port']),
        http_req_handler,
        name = "httpdis")
    # Run each registered command's at_start callback before serving
    # (Python 2 code: dict.iteritems).
    for name, cmd in _COMMANDS.iteritems():
        if cmd.at_start:
            LOG.info("at_start: %r", name)
            cmd.at_start(options)
    LOG.info("will now serve")
    # Serve until sigterm_handler flips _KILLED.  EINTR (interrupted system
    # call) and EBADF-during-shutdown are expected and swallowed; anything
    # else propagates.  (Python 2 `except ..., why` syntax.)
    while not _KILLED:
        try:
            _HTTP_SERVER.serve_until_killed()
        except (socket.error, select.error), why:
            if errno.EINTR == why[0]:
                LOG.debug("interrupted system call")
            elif errno.EBADF == why[0] and _KILLED:
                LOG.debug("server close")
            else:
                raise
    LOG.info("exiting")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init(options, use_sigterm_handler=True):
""" Must be called just after registration, before anything else """ |
    # pylint: disable-msg=W0613
    global _AUTH, _OPTIONS
    # Merge caller options over the defaults; accept either a plain dict
    # or any object exposing the option names as attributes.
    if isinstance(options, dict):
        _OPTIONS = DEFAULT_OPTIONS.copy()
        _OPTIONS.update(options)
    else:
        for optname, optvalue in DEFAULT_OPTIONS.iteritems():
            if hasattr(options, optname):
                _OPTIONS[optname] = getattr(options, optname)
            else:
                _OPTIONS[optname] = optvalue
    if _OPTIONS['testmethods']:
        # Register two trivial commands so a deployment can smoke-test
        # GET and POST handling end to end.
        def fortytwo(request):
            "test GET method"
            return 42
        def ping(request):
            "test POST method"
            return request.payload_params()
        register(fortytwo, 'GET')
        register(ping, 'POST')
    if _OPTIONS['auth_basic_file']:
        # Load the HTTP basic-auth credential file once at init time.
        _AUTH = HttpAuthentication(_OPTIONS['auth_basic_file'],
                                   realm = _OPTIONS['auth_basic']).parse_file()
    # Run safe_init hooks before any request can arrive.
    for name, cmd in _COMMANDS.iteritems():
        if cmd.safe_init:
            LOG.info("safe_init: %r", name)
            cmd.safe_init(_OPTIONS)
    if use_sigterm_handler:
        # signal.signal(signal.SIGHUP, lambda *x: None) # XXX
        signal.signal(signal.SIGTERM, sigterm_handler)
        signal.signal(signal.SIGINT, sigterm_handler)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def report(self, req_handler):
    """Send a response corresponding to this error to the client.

    Uses ``send_exception`` when a captured exception is attached,
    otherwise dispatches to ``send_error_<ctype>`` on the handler,
    falling back to ``send_error_msg``.
    """
    if self.exc:
        req_handler.send_exception(self.code, self.exc, self.headers)
        return
    text = (self.text
            or BaseHTTPRequestHandler.responses[self.code][1]
            or "Unknown error")
    # BUG FIX: the getattr default must be the bound fallback method,
    # not the string 'send_error_msg' — a plain string is not callable
    # and would raise TypeError for an unknown ctype.
    sender = getattr(req_handler, "send_error_%s" % self.ctype,
                     req_handler.send_error_msg)
    sender(self.code, text, self.headers)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_error_explain(self, code, message=None, headers=None, content_type=None):
    "do not use directly"
    # Normalize headers: anything that is not a real dict becomes a
    # fresh empty one.
    if not isinstance(headers, dict):
        headers = {}
    # Fill in the short message and long explanation from the table of
    # known response codes when available.
    if code in self.responses:
        entry = self.responses[code]
        explain = entry[1]
        if message is None:
            message = entry[0]
    else:
        explain = ""
    if message is None:
        message = ""
    # Content type and charset come from the current command when it
    # defines them, otherwise from the module defaults.
    cmd = self._cmd
    if not content_type:
        content_type = (cmd.content_type if cmd and cmd.content_type
                        else self._DEFAULT_CONTENT_TYPE)
    charset = cmd.charset if cmd and cmd.charset else DEFAULT_CHARSET
    headers['Content-type'] = "%s; charset=%s" % (content_type, charset)
    body = self._mk_error_explain_data(code, message, explain)
    self.end_response(self.build_response(code, body, headers))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_exception(self, code, exc_info=None, headers=None):
    """Send an error response carrying a formatted backtrace.

    When no exc_info triple is supplied, the currently handled
    exception (sys.exc_info()) is used.
    """
    hdrs = {} if headers is None else headers
    info = exc_info or sys.exc_info()
    self.send_error_msg(code, traceback.format_exception(*info), hdrs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_error_json(self, code, message, headers=None):
    "send an error to the client. text message is formatted in a json stream"
    if headers is None:
        headers = {}
    # Standard error payload shape: {code, message}.
    payload = {'code': code, 'message': message}
    self.end_response(HttpResponseJson(code, payload, headers))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def common_req(self, execute, send_body=True):
"Common code for GET and POST requests"
self._SERVER = {'CLIENT_ADDR_HOST': self.client_address[0],
'CLIENT_ADDR_PORT': self.client_address[1]}
self._to_log = True
self._cmd = None
self._payload = None
self._path = None
self._payload_params = None
self._query_params = {}
self._fragment = None
(cmd, res, req) = (None, None, None)
try:
try:
path = self._pathify() # pylint: disable-msg=W0612
cmd = path[1:]
res = execute(cmd)
except HttpReqError, e:
e.report(self)
except Exception:
try:
self.send_exception(500) # XXX 500
except Exception: # pylint: disable-msg=W0703
pass
raise
else:
if not isinstance(res, HttpResponse):
req = self.build_response()
if send_body:
req.add_data(res)
req.set_send_body(send_body)
else:
req = res
self.end_response(req)
except socket.error, e:
if e.errno in (errno.ECONNRESET, errno.EPIPE):
return
LOG.exception("exception - cmd=%r - method=%r", cmd, self.command)
except Exception: # pylint: disable-msg=W0703
LOG.exception("exception - cmd=%r - method=%r", cmd, self.command)
finally:
del req, res |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def base_parser():
""" Create arguments parser with basic options and no help message. * -c, --config: load configuration file. * -v, --verbose: increase logging verbosity. `-v`, `-vv`, and `-vvv`. * -q, --quiet: quiet logging except critical level. * -o, --output: output file. (default=sys.stdout) * --basedir: base directory. (default=os.getcwd) * --input-encoding: input data encoding. (default=utf-8) * --output-encoding: output data encoding. (default=utf-8) * --processes: count of processes. * --chunksize: a number of chunks submitted to the process pool. :rtype: :class:`argparse.ArgumentParser` """ |
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-c", "--config", dest="config",
type=argparse.FileType('r'),
metavar="FILE",
help="configuration file")
parser.add_argument("-o", "--output", dest="output",
type=argparse.FileType('w'),
metavar="FILE",
default=sys.stdout,
help="output file")
parser.add_argument("--basedir", dest="basedir",
default=os.getcwd(),
help="base directory")
parser.add_argument("--input-encoding", dest="input_encoding",
default=DEFAULT_ENCODING,
help="encoding of input source")
parser.add_argument("--output-encoding", dest="output_encoding",
default=DEFAULT_ENCODING,
help="encoding of output distination")
parser.add_argument("--processes", dest="processes", type=int,
help="number of processes")
parser.add_argument("--chunksize", dest="chunksize", type=int,
default=1,
help="number of chunks submitted to the process pool")
group = parser.add_mutually_exclusive_group()
group.add_argument("-v", "--verbose", dest="verbose",
action="count", default=0,
help="increase logging verbosity")
group.add_argument("-q", "--quiet", dest="quiet",
default=False, action="store_true",
help="set logging to quiet mode")
return parser |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cliconfig(fp, env=None):
""" Load configuration data. Given pointer is closed internally. If ``None`` is given, force to exit. More detailed information is available on underlying feature, :mod:`clitool.config`. :param fp: opened file pointer of configuration :type fp: FileType :param env: environment to load :type env: str :rtype: dict """ |
if fp is None:
raise SystemExit('No configuration file is given.')
from clitool.config import ConfigLoader
loader = ConfigLoader(fp)
cfg = loader.load(env)
if not fp.closed:
fp.close()
if not cfg:
logging.warn('Configuration may be empty.')
return cfg |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.