text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def add_image_description(self, dict):
    """Store *dict*, JSON-encoded, in the EXIF ImageDescription tag.

    No-op when no EXIF structure has been loaded (self._ef is None).
    """
    if self._ef is None:
        return
    self._ef['0th'][piexif.ImageIFD.ImageDescription] = json.dumps(dict)
def add_orientation(self, orientation):
    """Add image orientation to image.

    Valid EXIF orientation values are 1..8; any other value falls back
    to the default orientation 1 after reporting an error.
    """
    # idiom fix: `orientation not in` instead of `not orientation in`
    if orientation not in range(1, 9):
        print_error(
            "Error value for orientation, value must be in range(1,9), setting to default 1")
        orientation = 1
    self._ef['0th'][piexif.ImageIFD.Orientation] = orientation
def add_date_time_original(self, date_time, time_format='%Y:%m:%d %H:%M:%S.%f'):
    """Write *date_time* into the EXIF DateTimeOriginal tag.

    The formatted microseconds are truncated to milliseconds (the
    trailing three digits are dropped).  Failures are reported via
    print_error rather than raised.
    """
    try:
        formatted = date_time.strftime(time_format)[:-3]
        self._ef['Exif'][piexif.ExifIFD.DateTimeOriginal] = formatted
    except Exception as e:
        print_error("Error writing DateTimeOriginal, due to " + str(e))
def add_image_history(self, data):
    """Store *data*, JSON-encoded, in the EXIF ImageHistory tag."""
    encoded = json.dumps(data)
    self._ef['0th'][piexif.ImageIFD.ImageHistory] = encoded
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_camera_make_model(self, make, model):
    '''Add camera make and model to the 0th IFD.'''
    ifd0 = self._ef['0th']
    ifd0[piexif.ImageIFD.Make] = make
    ifd0[piexif.ImageIFD.Model] = model
def add_direction(self, direction, ref="T", precision=100):
    """Write GPSImgDirection (plus its reference) as an EXIF rational.

    The direction is normalised into [0, 360) degrees first; the
    rational stored is (direction * precision, precision).
    """
    normalised = direction % 360.0
    rational = (int(abs(normalised) * precision), precision)
    gps_ifd = self._ef["GPS"]
    gps_ifd[piexif.GPSIFD.GPSImgDirection] = rational
    gps_ifd[piexif.GPSIFD.GPSImgDirectionRef] = ref
def write(self, filename=None):
    """Save exif data to file.

    Reads the source image from self._filename and writes a copy with
    the accumulated EXIF inserted to *filename* (defaults to the
    original path, i.e. in-place).
    """
    if filename is None:
        filename = self._filename
    exif_bytes = piexif.dump(self._ef)
    # The source image is always read from the original path; only the
    # destination path is overridable.
    with open(self._filename, "rb") as fin:
        img = fin.read()
    try:
        piexif.insert(exif_bytes, img, filename)
    except IOError:
        # was `print >> sys.stderr, ...` (Python 2 only syntax);
        # sys.stderr.write works on both Python 2 and 3
        _, value, _ = sys.exc_info()
        sys.stderr.write("Error saving file: {}\n".format(value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def estimate_sub_second_time(files, interval=0.0):
    '''Estimate capture times of a sequence with sub-second precision.

    EXIF times only carry one-second precision.  When a fixed shooting
    *interval* is known, the start offset inside that second is
    narrowed down by intersecting the [t, t+1s) windows of all images.
    Returns a list of datetimes, or None when a time is missing or the
    interval is incompatible with the EXIF times.
    '''
    if interval <= 0.0:
        return [exif_time(f)
                for f in tqdm(files, desc="Reading image capture time")]

    onesecond = datetime.timedelta(seconds=1.0)
    T = datetime.timedelta(seconds=interval)
    for i, f in tqdm(enumerate(files), desc="Estimating subsecond time"):
        m = exif_time(f)
        if not m:
            # Missing EXIF time makes estimation impossible.  (Was a
            # no-op `pass`, which then crashed on the arithmetic below.)
            return None
        if i == 0:
            smin = m
            smax = m + onesecond
        else:
            m0 = m - T * i
            smin = max(smin, m0)
            smax = min(smax, m0 + onesecond)

    if not smin or not smax:
        return None
    if smin > smax:
        # ERROR LOG
        print('Interval not compatible with EXIF times')
        return None
    s = smin + (smax - smin) / 2
    return [s + T * i for i in range(len(files))]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def interpolate_timestamp(capture_times):
    '''Spread identical timestamps evenly across their interval.

    *capture_times* is assumed sorted.  Every run of identical
    timestamps is offset by k * interval / count so the returned list
    is strictly increasing.  The final run reuses the preceding run's
    interval; when all timestamps are identical, one second per image
    is assumed.
    '''
    num_file = len(capture_times)
    if num_file < 2:
        return capture_times

    # Count duplicates and record the interval that follows each
    # distinct timestamp.
    time_dict = OrderedDict()
    for i, t in enumerate(capture_times):
        if t not in time_dict:
            time_dict[t] = {"count": 0, "pointer": 0}
            interval = 0
            if i != 0:
                interval = (t - capture_times[i - 1]).total_seconds()
                time_dict[capture_times[i - 1]]["interval"] = interval
        time_dict[t]["count"] += 1

    # Python 3 fix: odict_keys is not indexable, so materialise a list
    # (the original `time_dict.keys()[-1]` only worked on Python 2).
    keys = list(time_dict)
    if len(time_dict) >= 2:
        # set time interval as the last available time interval
        time_dict[keys[-1]]["interval"] = time_dict[keys[-2]]["interval"]
    else:
        # set time interval assuming capture interval is 1 second
        time_dict[keys[0]]["interval"] = time_dict[keys[0]]["count"] * 1.

    # interpolate timestamps
    timestamps = []
    for t in capture_times:
        d = time_dict[t]
        offset = datetime.timedelta(
            seconds=d["pointer"] * d["interval"] / float(d["count"]))
        timestamps.append(t + offset)
        d["pointer"] += 1
    return timestamps
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def extract_stream(source, dest, stream_id):
    '''Extract a single stream from *source* into *dest* using ffmpeg.

    @param source: mp4 filename
    @param dest: output path for the raw stream
    @param stream_id: index of the stream to copy
    '''
    if not os.path.isfile(source):
        raise IOError('No such file: ' + source)
    cmd = [
        'ffmpeg',
        '-i', source,
        '-y',  # overwrite - potentially dangerous
        '-nostats',
        '-loglevel', '0',
        '-codec', 'copy',
        '-map', '0:' + str(stream_id),
        '-f', 'rawvideo',
        dest,
    ]
    subprocess.check_output(cmd)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def verify_exif(filename):
    '''Return True when the image has all required EXIF fields.

    Incompatible files will be ignored server side.
    '''
    required_exif = required_fields()
    return ExifRead(filename).fields_exist(required_exif)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def verify_mapillary_tag(filepath):
    '''Check that the image file has the required Mapillary tag.

    When a processed copy of the image exists, that copy is inspected
    instead of the original.
    '''
    keep_original = processing.processed_images_rootpath(filepath)
    if os.path.isfile(keep_original):
        filepath = keep_original
    return ExifRead(filepath).mapillary_tag_exists()
def isAudio(self):
    """Is this stream labelled as an audio stream?"""
    codec_type = self.__dict__['codec_type']
    return bool(codec_type) and str(codec_type) == 'audio'
def isVideo(self):
    """Is the stream labelled as a video stream."""
    if not self.__dict__['codec_type']:
        return False
    return self.codec_type == 'video'
def isSubtitle(self):
    """Is the stream labelled as a subtitle stream."""
    if not self.__dict__['codec_type']:
        return False
    return str(self.codec_type) == 'subtitle'
def frames(self):
    """Return the length of a video/audio stream in frames.

    Returns 0 for other stream types and when nb_frames is missing or
    not an integer.
    """
    f = 0
    if self.isVideo() or self.isAudio():
        if self.__dict__['nb_frames']:
            try:
                f = int(self.__dict__['nb_frames'])
            except Exception:
                # was a Python 2 print statement (syntax error on Py3)
                print("None integer frame count")
    return f
def durationSeconds(self):
    """Return the runtime duration of a video/audio stream as a float
    number of seconds.

    Returns 0.0 for other stream types and when duration is missing or
    not numeric.
    """
    f = 0.0
    if self.isVideo() or self.isAudio():
        if self.__dict__['duration']:
            try:
                f = float(self.__dict__['duration'])
            except Exception:
                # was a Python 2 print statement (syntax error on Py3)
                print("None numeric duration")
    return f
def bitrate(self):
    """Return the stream bitrate as an integer in bps (0 if unknown
    or non-numeric)."""
    b = 0
    if self.__dict__['bit_rate']:
        try:
            b = int(self.__dict__['bit_rate'])
        except Exception:
            # was a Python 2 print statement (syntax error on Py3)
            print("None integer bitrate")
    return b
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_upload_url(credentials):
    '''Return upload parameters from the new upload API.

    Exits the process when the request fails.
    '''
    request_url = "https://a.mapillary.com/v3/users/{}/upload_secrets?client_id={}".format(
        credentials["MAPSettingsUserKey"], CLIENT_ID)
    request = urllib2.Request(request_url)
    request.add_header('Authorization', 'Bearer {}'.format(
        credentials["user_upload_token"]))
    try:
        response = json.loads(urllib2.urlopen(request).read())
    except urllib2.HTTPError:
        # was requests.exceptions.HTTPError, but the request is made
        # with urllib2, which raises its own HTTPError — the old
        # handler could never catch the failure it was written for
        print("Error getting upload parameters, upload could not start")
        sys.exit(1)
    return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_upload_token(mail, pwd):
    '''Return the Mapillary upload token for the given credentials.

    Returns None on any network/parse failure or when the response
    carries no token.
    '''
    try:
        params = urllib.urlencode({"email": mail, "password": pwd})
        response = urllib.urlopen(LOGIN_URL, params)
    except:  # noqa: E722 — deliberately best-effort (original behaviour)
        return None
    resp = json.loads(response.read())
    if not resp or 'token' not in resp:
        return None
    return resp['token']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def authenticate_with_email_and_pwd(user_email, user_password):
    '''Authenticate the user with email and password.

    Avoids prompting the command line for user credentials, which is
    useful for calling tools programmatically.  Returns a dict of user
    settings; exits the process on authentication failure.
    '''
    if user_email is None or user_password is None:
        raise ValueError(
            'Could not authenticate user. Missing username or password')
    upload_token = uploader.get_upload_token(user_email, user_password)
    if not upload_token:
        # fix: `user_name` was never defined in this scope
        print("Authentication failed for user name " +
              user_email + ", please try again.")
        sys.exit(1)
    user_key = get_user_key(user_email)
    if not user_key:
        print("User name {} does not exist, please try again or contact Mapillary user support.".format(
            user_email))
        sys.exit(1)
    user_permission_hash, user_signature_hash = get_user_hashes(
        user_key, upload_token)
    # fix: `user_items` and `section` were never defined in this scope
    user_items = {}
    user_items["MAPSettingsUsername"] = user_email
    user_items["MAPSettingsUserKey"] = user_key
    user_items["user_upload_token"] = upload_token
    user_items["user_permission_hash"] = user_permission_hash
    user_items["user_signature_hash"] = user_signature_hash
    return user_items
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def upload_file(filepath, max_attempts, url, permission, signature, key=None, aws_key=None):
    '''Upload the image at *filepath* to S3, retrying up to
    *max_attempts* times.

    An upload log entry (success/failure) is written for the original
    image.  When DRY_RUN is set, no network traffic happens.
    '''
    if max_attempts is None:  # fix: `== None` comparison
        max_attempts = MAX_ATTEMPTS
    filename = os.path.basename(filepath)
    # Prefer the EXIF-derived name when readable; fall back to basename.
    s3_filename = filename
    try:
        s3_filename = ExifRead(filepath).exif_name()
    except:
        pass
    # Upload the processed copy when one exists; log against the original.
    filepath_keep_original = processing.processed_images_rootpath(filepath)
    filepath_in = filepath
    if os.path.isfile(filepath_keep_original):
        filepath = filepath_keep_original
    # add S3 'path' if given
    if key is None:
        s3_key = s3_filename
    else:
        s3_key = key + s3_filename
    parameters = {"key": s3_key, "AWSAccessKeyId": aws_key, "acl": "private",
                  "policy": permission, "signature": signature,
                  "Content-Type": "image/jpeg"}
    with open(filepath, "rb") as f:
        encoded_string = f.read()
    data, headers = encode_multipart(
        parameters, {'file': {'filename': filename, 'content': encoded_string}})

    if DRY_RUN:
        print('DRY_RUN, Skipping actual image upload. Use this for debug only.')
        return

    displayed_upload_error = False
    for attempt in range(max_attempts):
        # Initialize response before each attempt
        response = None
        try:
            request = urllib2.Request(url, data=data, headers=headers)
            response = urllib2.urlopen(request)
            if response.getcode() == 204:
                create_upload_log(filepath_in, "upload_success")
                if displayed_upload_error:
                    print("Successful upload of {} on attempt {}".format(
                        filename, attempt))
            else:
                create_upload_log(filepath_in, "upload_failed")
            break  # attempts
        except urllib2.HTTPError as e:
            print("HTTP error: {} on {}, will attempt upload again for {} more times".format(
                e, filename, max_attempts - attempt - 1))
            displayed_upload_error = True
            time.sleep(5)
        except urllib2.URLError as e:
            print("URL error: {} on {}, will attempt upload again for {} more times".format(
                e, filename, max_attempts - attempt - 1))
            time.sleep(5)
        except httplib.HTTPException as e:
            print("HTTP exception: {} on {}, will attempt upload again for {} more times".format(
                e, filename, max_attempts - attempt - 1))
            time.sleep(5)
        except OSError as e:
            # fix: this message string was split/garbled in the source
            print("OS error: {} on {}, will attempt upload again for {} more times".format(
                e, filename, max_attempts - attempt - 1))
            time.sleep(5)
        except socket.timeout as e:
            # Specific timeout handling for Python 2.7
            print("Timeout error: {} (retrying), will attempt upload again for {} more times".format(
                filename, max_attempts - attempt - 1))
        finally:
            if response is not None:
                response.close()
def murmur3_32(data, seed=0):
    """MurmurHash3 was written by Austin Appleby, and is placed in the public domain. The author hereby disclaims copyright to this source code."""
    # 32-bit MurmurHash3 of `data` (indexed one character at a time via
    # ord(), so `data` is expected to be a str).  Returns an unsigned
    # 32-bit integer.
    c1 = 0xcc9e2d51
    c2 = 0x1b873593
    length = len(data)
    h1 = seed
    roundedEnd = (length & 0xfffffffc)  # round down to 4 byte block
    # Body: consume the input four bytes at a time.
    for i in range(0, roundedEnd, 4):
        # little endian load order
        k1 = (ord(data[i]) & 0xff) | ((ord(data[i + 1]) & 0xff) << 8) | \
            ((ord(data[i + 2]) & 0xff) << 16) | (ord(data[i + 3]) << 24)
        k1 *= c1
        k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17)  # ROTL32(k1,15)
        k1 *= c2
        h1 ^= k1
        h1 = (h1 << 13) | ((h1 & 0xffffffff) >> 19)  # ROTL32(h1,13)
        h1 = h1 * 5 + 0xe6546b64
    # tail — 0 to 3 remaining bytes, mixed in only when at least one
    # byte is left (the `val in [1, 2, 3]` branch).
    k1 = 0
    val = length & 0x03
    if val == 3:
        k1 = (ord(data[roundedEnd + 2]) & 0xff) << 16
    # fallthrough
    if val in [2, 3]:
        k1 |= (ord(data[roundedEnd + 1]) & 0xff) << 8
    # fallthrough
    if val in [1, 2, 3]:
        k1 |= ord(data[roundedEnd]) & 0xff
        k1 *= c1
        k1 = (k1 << 15) | ((k1 & 0xffffffff) >> 17)  # ROTL32(k1,15)
        k1 *= c2
        h1 ^= k1
    # finalization
    h1 ^= length
    # fmix(h1) — avalanche the final bits.
    h1 ^= ((h1 & 0xffffffff) >> 16)
    h1 *= 0x85ebca6b
    h1 ^= ((h1 & 0xffffffff) >> 13)
    h1 *= 0xc2b2ae35
    h1 ^= ((h1 & 0xffffffff) >> 16)
    # NOTE: intermediate values may exceed 32 bits (Python ints are
    # unbounded); masking only on return is safe because bits above 32
    # never flow back into the low word (shifts right are pre-masked).
    return h1 & 0xffffffff
def _readline(sock, buf):
    """Read line of text from the socket.

    Read a line of text (delimited by "\r\n") from the socket, and
    return that line along with any trailing characters read from the
    socket.

    Args:
      sock: Socket object, should be connected.
      buf: String, zero or more characters, returned from an earlier
        call to _readline or _readvalue (pass an empty string on the
        first call).

    Returns:
      A tuple of (buf, line) where line is the full line read from the
      socket (minus the "\r\n" characters) and buf is any trailing
      characters read after the "\r\n" was found (which may be an
      empty string).
    """
    chunks = []
    last_char = b''
    while True:
        # We're reading in chunks, so "\r\n" could appear in one chunk,
        # or across the boundary of two chunks, so we check for both
        # cases.
        # This case must appear first, since the buffer could have
        # later \r\n characters in it and we want to get the first \r\n.
        if last_char == b'\r' and buf[0:1] == b'\n':
            # Strip the last character from the last chunk.
            chunks[-1] = chunks[-1][:-1]
            return buf[1:], b''.join(chunks)
        elif buf.find(b'\r\n') != -1:
            before, sep, after = buf.partition(b"\r\n")
            chunks.append(before)
            return after, b''.join(chunks)
        if buf:
            chunks.append(buf)
            last_char = buf[-1:]
        # Refill from the socket; an empty read means the server closed
        # the connection mid-line.
        buf = _recv(sock, RECV_SIZE)
        if not buf:
            raise MemcacheUnexpectedCloseError()
def _readvalue(sock, buf, size):
    """Read specified amount of bytes from the socket.

    Read size bytes, followed by the "\r\n" characters, from the
    socket, and return those bytes and any trailing bytes read after
    the "\r\n".

    Args:
      sock: Socket object, should be connected.
      buf: String, zero or more characters, returned from an earlier
        call to _readline or _readvalue (pass an empty string on the
        first call).
      size: Integer, number of bytes to read from the socket.

    Returns:
      A tuple of (buf, value) where value is the bytes read from the
      socket (there will be exactly size bytes) and buf is trailing
      characters read after the "\r\n" following the bytes (but not
      including the \r\n).
    """
    chunks = []
    rlen = size + 2  # the value plus its trailing "\r\n"
    # Accumulate whole chunks until `buf` holds at least the remainder.
    while rlen - len(buf) > 0:
        if buf:
            rlen -= len(buf)
            chunks.append(buf)
        buf = _recv(sock, RECV_SIZE)
        if not buf:
            raise MemcacheUnexpectedCloseError()
    # Now we need to remove the \r\n from the end. There are two cases we care
    # about: the \r\n is all in the last buffer, or only the \n is in the last
    # buffer, and we need to remove the \r from the penultimate buffer.
    if rlen == 1:
        # replace the last chunk with the same string minus the last character,
        # which is always '\r' in this case.
        chunks[-1] = chunks[-1][:-1]
    else:
        # Just remove the "\r\n" from the latest chunk
        chunks.append(buf[:rlen - 2])
    return buf[rlen:], b''.join(chunks)
def close(self):
    """Close the connection to memcached, if it is open.

    The next call to a method that requires a connection will re-open
    it.  Errors raised while closing are deliberately swallowed.
    """
    if self.sock is None:
        return
    try:
        self.sock.close()
    except Exception:
        pass
    finally:
        self.sock = None
def set(self, key, value, expire=0, noreply=None):
    """Memcached "set": store *value* under *key*.

    expire: seconds until expiry (0 = never).
    noreply: skip waiting for the reply; defaults to
        self.default_noreply.  Always returns True on a normal return;
        with noreply, a True return does not guarantee the set
        happened.
    """
    noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'set', {key: value}, expire, noreply)[key]
def set_many(self, values, expire=0, noreply=None):
    """Memcached "set" for many key/value pairs at once.

    Returns the list of keys that failed to be stored (always empty
    when noreply is used).
    """
    noreply = self.default_noreply if noreply is None else noreply
    result = self._store_cmd(b'set', values, expire, noreply)
    failed = [k for k, ok in six.iteritems(result) if not ok]
    return failed
def add(self, key, value, expire=0, noreply=None):
    """Memcached "add": store only when *key* does not already exist.

    Returns True when the value was stored, False when the key already
    existed (always True with noreply).
    """
    noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'add', {key: value}, expire, noreply)[key]
def replace(self, key, value, expire=0, noreply=None):
    """Memcached "replace": store only when *key* already exists.

    Returns True when the value was stored, False when the key didn't
    already exist (always True with noreply).
    """
    noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'replace', {key: value}, expire, noreply)[key]
def append(self, key, value, expire=0, noreply=None):
    """Memcached "append": concatenate *value* after the stored value.

    Returns True.
    """
    noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'append', {key: value}, expire, noreply)[key]
def prepend(self, key, value, expire=0, noreply=None):
    """Memcached "prepend": concatenate *value* before the stored
    value.

    Returns True.
    """
    noreply = self.default_noreply if noreply is None else noreply
    return self._store_cmd(b'prepend', {key: value}, expire, noreply)[key]
def cas(self, key, value, cas, expire=0, noreply=False):
    """Memcached "cas": store only if the cas token still matches.

    Returns None when the key didn't exist, False on a cas mismatch
    and True on success (always True when noreply is set).
    """
    result = self._store_cmd(b'cas', {key: value}, expire, noreply, cas)
    return result[key]
def get(self, key, default=None):
    """Memcached "get" for a single key.

    Returns the stored value, or *default* when the key wasn't found.
    """
    result = self._fetch_cmd(b'get', [key], False)
    return result.get(key, default)
def gets(self, key, default=None, cas_default=None):
    """Memcached "gets" for one key.

    Returns (value, cas_token), or (default, cas_default) when the key
    wasn't found.
    """
    miss = (default, cas_default)
    return self._fetch_cmd(b'gets', [key], True).get(key, miss)
def delete(self, key, noreply=None):
    """Memcached "delete".

    Returns True when the key was deleted, False when it wasn't found
    (always True with noreply; defaults to self.default_noreply).
    """
    if noreply is None:
        noreply = self.default_noreply
    cmd = b'delete ' + self.check_key(key)
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'delete', noreply)
    return True if noreply else results[0] == b'DELETED'
def delete_many(self, keys, noreply=None):
    """Delete multiple keys in one round trip.

    Always returns True; with noreply=False the deletions have been
    acknowledged by memcached on return.
    """
    if not keys:
        return True
    if noreply is None:
        noreply = self.default_noreply
    suffix = b' noreply' if noreply else b''
    cmds = [b'delete ' + self.check_key(k) + suffix + b'\r\n'
            for k in keys]
    self._misc_cmd(cmds, b'delete', noreply)
    return True
def incr(self, key, value, noreply=False):
    """Memcached "incr": add *value* to the stored counter.

    Returns the new value, or None when the key is missing or noreply
    is set.
    """
    cmd = b'incr ' + self.check_key(key) + b' ' + \
        six.text_type(value).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'incr', noreply)
    if noreply or results[0] == b'NOT_FOUND':
        return None
    return int(results[0])
def decr(self, key, value, noreply=False):
    """Memcached "decr": subtract *value* from the stored counter.

    Returns the new value, or None when the key is missing or noreply
    is set.
    """
    cmd = b'decr ' + self.check_key(key) + b' ' + \
        six.text_type(value).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'decr', noreply)
    if noreply or results[0] == b'NOT_FOUND':
        return None
    return int(results[0])
def touch(self, key, expire=0, noreply=None):
    """Memcached "touch": refresh *key*'s expiry time.

    Returns True when the expiry was updated, False when the key was
    not found (always True with noreply).
    """
    if noreply is None:
        noreply = self.default_noreply
    cmd = b'touch ' + self.check_key(key) + b' ' + \
        six.text_type(expire).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'touch', noreply)
    return True if noreply else results[0] == b'TOUCHED'
def stats(self, *args):
    """Memcached "stats" command.

    Values are converted via STAT_TYPES (defaulting to int) on a
    best-effort basis; values that fail conversion are left as-is.
    """
    result = self._fetch_cmd(b'stats', args, False)
    for key, value in six.iteritems(result):
        converter = STAT_TYPES.get(key, int)
        try:
            result[key] = converter(value)
        except Exception:
            pass
    return result
def cache_memlimit(self, memlimit):
    """Memcached "cache_memlimit": set the cache size in megabytes.

    Returns True unless an exception is raised.
    """
    arg = str(int(memlimit))
    self._fetch_cmd(b'cache_memlimit', [arg], False)
    return True
def version(self):
    """Memcached "version" command.

    Returns the version string; raises MemcacheUnknownError on an
    unexpected response.
    """
    results = self._misc_cmd([b"version\r\n"], b'version', False)
    before, _, after = results[0].partition(b' ')
    if before != b'VERSION':
        raise MemcacheUnknownError(
            "Received unexpected response: %s" % results[0])
    return after
def flush_all(self, delay=0, noreply=None):
    """Memcached "flush_all": invalidate all keys after *delay*
    seconds (0 = immediately).

    Returns True.
    """
    if noreply is None:
        noreply = self.default_noreply
    cmd = b'flush_all ' + six.text_type(delay).encode('ascii')
    if noreply:
        cmd += b' noreply'
    cmd += b'\r\n'
    results = self._misc_cmd([cmd], b'flush_all', noreply)
    return True if noreply else results[0] == b'OK'
def quit(self):
    """Send "quit" and close the socket.

    The connection is reopened lazily by the next command, so this
    object remains usable after quit.
    """
    self._misc_cmd([b"quit\r\n"], b'quit', True)
    self.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_list(value, allow_filename=False): """Create a list from the input value. If the input is a list already, return it. If the input is a comma-separated string, split it. """
if isinstance(value, list): return value elif isinstance(value, string_type): if allow_filename and os.path.isfile(value): with codecs.open(value, 'r', encoding="utf-8") as handle: return handle.read().splitlines() return value.split(',') else: raise ValueError("Can't create list for input {}".format(value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_compatible_assembly_level(self, ncbi_assembly_level): """Check if a given ncbi assembly level string matches the configured assembly levels."""
configured_ncbi_strings = [self._LEVELS[level] for level in self.assembly_level] return ncbi_assembly_level in configured_ncbi_strings
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_kwargs(cls, **kwargs): """Initialise configuration from kwargs."""
config = cls() for slot in cls.__slots__: if slot.startswith('_'): slot = slot[1:] setattr(config, slot, kwargs.pop(slot, cls.get_default(slot))) if kwargs: raise ValueError("Unrecognized option(s): {}".format(kwargs.keys())) return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_namespace(cls, namespace): """Initialise from argparser Namespace object."""
config = cls() for slot in cls.__slots__: if slot.startswith('_'): slot = slot[1:] if not hasattr(namespace, slot): continue setattr(config, slot, getattr(namespace, slot)) return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_default(cls, category): """Get the default value of a given category."""
value = cls._DEFAULTS[category] if not value or not isinstance(value, list): return value return value[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_choices(cls, category): """Get all available options for a category."""
value = cls._DEFAULTS[category] if not isinstance(value, list): raise ValueError("{} does not offer choices".format(category)) return value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """Build and parse command line."""
parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logging.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(columns=None): """Get or create MetaData singleton."""
if columns is None: columns = _DEFAULT_COLUMNS global _METADATA if not _METADATA: _METADATA = MetaData(columns) return _METADATA
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, entry, local_file): """Add a metadata row."""
row = self.rowClass() for key, val in entry.items(): if key in self.columns: setattr(row, key, val) row.local_filename = os.path.join('.', os.path.relpath(local_file)) self.rows.append(row)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write(self, handle): """Write metadata to handle."""
handle.write(u"\t".join(self.columns)) handle.write(u"\n") for row in self.rows: row.write(handle)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def config_download(config): """Run the actual download from NCBI with parameters in a config object. Parameters config: NgdConfig A configuration object with the download settings Returns ------- int success code """
try: download_candidates = select_candidates(config) if len(download_candidates) < 1: logging.error("No downloads matched your filter. Please check your options.") return 1 if config.dry_run: print("Considering the following {} assemblies for download:".format(len(download_candidates))) for entry, _ in download_candidates: print(entry['assembly_accession'], entry['organism_name'], sep="\t") return 0 download_jobs = [] for entry, group in download_candidates: download_jobs.extend(create_downloadjob(entry, group, config)) if config.parallel == 1: for dl_job in download_jobs: worker(dl_job) else: # pragma: no cover # Testing multiprocessing code is annoying pool = Pool(processes=config.parallel) jobs = pool.map_async(worker, download_jobs) try: # 0xFFFF is just "a really long time" jobs.get(0xFFFF) except KeyboardInterrupt: # TODO: Actually test this once I figure out how to do this in py.test logging.error("Interrupted by user") return 1 if config.metadata_table: with codecs.open(config.metadata_table, mode='w', encoding='utf-8') as handle: table = metadata.get() table.write(handle) except requests.exceptions.ConnectionError as err: logging.error('Download from NCBI failed: %r', err) # Exit code 75 meas TEMPFAIL in C/C++, so let's stick with that for now. return 75 return 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def select_candidates(config): """Select candidates to download. Parameters config: NgdConfig Runtime configuration object Returns ------- list of (<candidate entry>, <taxonomic group>) """
download_candidates = [] for group in config.group: summary_file = get_summary(config.section, group, config.uri, config.use_cache) entries = parse_summary(summary_file) for entry in filter_entries(entries, config): download_candidates.append((entry, group)) return download_candidates
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter_entries(entries, config): """Narrrow down which entries to download."""
def in_genus_list(species, genus_list): for genus in genus_list: if species.startswith(genus.capitalize()): return True return False new_entries = [] for entry in entries: if config.type_material and config.type_material != ['any']: requested_types = map(lambda x: config._RELATION_TO_TYPE_MATERIAL[x], config.type_material) if not entry['relation_to_type_material'] or entry['relation_to_type_material'] not in requested_types: logging.debug("Skipping assembly with no reference to type material or reference to type material does not match requested") continue else: print(entry['relation_to_type_material']) if config.genus and not in_genus_list(entry['organism_name'], config.genus): logging.debug('Organism name %r does not start with any in %r, skipping', entry['organism_name'], config.genus) continue if config.species_taxid and entry['species_taxid'] not in config.species_taxid: logging.debug('Species TaxID %r does not match with any in %r, skipping', entry['species_taxid'], config.species_taxid) continue if config.taxid and entry['taxid'] not in config.taxid: logging.debug('Organism TaxID %r does not match with any in %r, skipping', entry['taxid'], config.taxid) continue if not config.is_compatible_assembly_accession(entry['assembly_accession']): logging.debug('Skipping entry with incompatible assembly accession %r', entry['assembly_accession']) continue if not config.is_compatible_assembly_level(entry['assembly_level']): logging.debug('Skipping entry with assembly level %r', entry['assembly_level']) continue if config.refseq_category != 'all' \ and entry['refseq_category'] != config.get_refseq_category_string(config.refseq_category): logging.debug('Skipping entry with refseq_category %r, not %r', entry['refseq_category'], config.refseq_category) continue new_entries.append(entry) return new_entries
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def worker(job): """Run a single download job."""
ret = False try: if job.full_url is not None: req = requests.get(job.full_url, stream=True) ret = save_and_check(req, job.local_file, job.expected_checksum) if not ret: return ret ret = create_symlink(job.local_file, job.symlink_path) except KeyboardInterrupt: # pragma: no cover # TODO: Actually test this once I figure out how to do this in py.test logging.debug("Ignoring keyboard interrupt.") return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_summary(section, domain, uri, use_cache): """Get the assembly_summary.txt file from NCBI and return a StringIO object for it."""
logging.debug('Checking for a cached summary file') cachefile = "{section}_{domain}_assembly_summary.txt".format(section=section, domain=domain) full_cachefile = os.path.join(CACHE_DIR, cachefile) if use_cache and os.path.exists(full_cachefile) and \ datetime.utcnow() - datetime.fromtimestamp(os.path.getmtime(full_cachefile)) < timedelta(days=1): logging.info('Using cached summary.') with codecs.open(full_cachefile, 'r', encoding='utf-8') as fh: return StringIO(fh.read()) logging.debug('Downloading summary for %r/%r uri: %r', section, domain, uri) url = '{uri}/{section}/{domain}/assembly_summary.txt'.format( section=section, domain=domain, uri=uri) req = requests.get(url) if use_cache: try: os.makedirs(CACHE_DIR) except OSError as err: # Errno 17 is "file exists", ignore that, otherwise re-raise if err.errno != 17: raise with codecs.open(full_cachefile, 'w', encoding='utf-8') as fh: fh.write(req.text) return StringIO(req.text)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_downloadjob(entry, domain, config): """Create download jobs for all file formats from a summary file entry."""
logging.info('Checking record %r', entry['assembly_accession']) full_output_dir = create_dir(entry, config.section, domain, config.output) symlink_path = None if config.human_readable: symlink_path = create_readable_dir(entry, config.section, domain, config.output) checksums = grab_checksums_file(entry) # TODO: Only write this when the checksums file changed with open(os.path.join(full_output_dir, 'MD5SUMS'), 'w') as handle: handle.write(checksums) parsed_checksums = parse_checksums(checksums) download_jobs = [] for fmt in config.file_format: try: if has_file_changed(full_output_dir, parsed_checksums, fmt): download_jobs.append( download_file_job(entry, full_output_dir, parsed_checksums, fmt, symlink_path)) elif need_to_create_symlink(full_output_dir, parsed_checksums, fmt, symlink_path): download_jobs.append( create_symlink_job(full_output_dir, parsed_checksums, fmt, symlink_path)) except ValueError as err: logging.error(err) return download_jobs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_dir(entry, section, domain, output): """Create the output directory for the entry if needed."""
full_output_dir = os.path.join(output, section, domain, entry['assembly_accession']) try: os.makedirs(full_output_dir) except OSError as err: if err.errno == errno.EEXIST and os.path.isdir(full_output_dir): pass else: raise return full_output_dir
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_readable_dir(entry, section, domain, output): """Create the a human-readable directory to link the entry to if needed."""
if domain != 'viral': full_output_dir = os.path.join(output, 'human_readable', section, domain, get_genus_label(entry), get_species_label(entry), get_strain_label(entry)) else: full_output_dir = os.path.join(output, 'human_readable', section, domain, entry['organism_name'].replace(' ', '_'), get_strain_label(entry, viral=True)) try: os.makedirs(full_output_dir) except OSError as err: if err.errno == errno.EEXIST and os.path.isdir(full_output_dir): pass else: raise return full_output_dir
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def grab_checksums_file(entry): """Grab the checksum file for a given entry."""
http_url = convert_ftp_url(entry['ftp_path']) full_url = '{}/md5checksums.txt'.format(http_url) req = requests.get(full_url) return req.text
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_checksums(checksums_string): """Parse a file containing checksums and filenames."""
checksums_list = [] for line in checksums_string.split('\n'): try: # skip empty lines if line == '': continue checksum, filename = line.split() # strip leading ./ if filename.startswith('./'): filename = filename[2:] checksums_list.append({'checksum': checksum, 'file': filename}) except ValueError: logging.debug('Skipping over unexpected checksum line %r', line) continue return checksums_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def has_file_changed(directory, checksums, filetype='genbank'): """Check if the checksum of a given file has changed."""
pattern = NgdConfig.get_fileending(filetype) filename, expected_checksum = get_name_and_checksum(checksums, pattern) full_filename = os.path.join(directory, filename) # if file doesn't exist, it has changed if not os.path.isfile(full_filename): return True actual_checksum = md5sum(full_filename) return expected_checksum != actual_checksum
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def need_to_create_symlink(directory, checksums, filetype, symlink_path): """Check if we need to create a symlink for an existing file."""
# If we don't have a symlink path, we don't need to create a symlink if symlink_path is None: return False pattern = NgdConfig.get_fileending(filetype) filename, _ = get_name_and_checksum(checksums, pattern) full_filename = os.path.join(directory, filename) symlink_name = os.path.join(symlink_path, filename) if os.path.islink(symlink_name): existing_link = os.readlink(symlink_name) if full_filename == existing_link: return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_name_and_checksum(checksums, end): """Extract a full filename and checksum from the checksums list for a file ending in given end."""
for entry in checksums: if not entry['file'].endswith(end): # wrong file continue # workaround for ..cds_from_genomic.fna.gz and ..rna_from_genomic.fna.gz also # ending in _genomic.fna.gz, causing bogus matches for the plain fasta if '_from_' not in end and '_from_' in entry['file']: # still the wrong file continue filename = entry['file'] expected_checksum = entry['checksum'] return filename, expected_checksum raise ValueError('No entry for file ending in {!r}'.format(end))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def md5sum(filename): """Calculate the md5sum of a file and return the hexdigest."""
hash_md5 = hashlib.md5() with open(filename, 'rb') as handle: for chunk in iter(lambda: handle.read(4096), b''): hash_md5.update(chunk) return hash_md5.hexdigest()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def download_file_job(entry, directory, checksums, filetype='genbank', symlink_path=None): """Generate a DownloadJob that actually triggers a file download."""
pattern = NgdConfig.get_fileending(filetype) filename, expected_checksum = get_name_and_checksum(checksums, pattern) base_url = convert_ftp_url(entry['ftp_path']) full_url = '{}/{}'.format(base_url, filename) local_file = os.path.join(directory, filename) full_symlink = None if symlink_path is not None: full_symlink = os.path.join(symlink_path, filename) # Keep metadata around mtable = metadata.get() mtable.add(entry, local_file) return DownloadJob(full_url, local_file, expected_checksum, full_symlink)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_symlink_job(directory, checksums, filetype, symlink_path): """Create a symlink-creating DownloadJob for an already downloaded file."""
pattern = NgdConfig.get_fileending(filetype) filename, _ = get_name_and_checksum(checksums, pattern) local_file = os.path.join(directory, filename) full_symlink = os.path.join(symlink_path, filename) return DownloadJob(None, local_file, None, full_symlink)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save_and_check(response, local_file, expected_checksum): """Save the content of an http response and verify the checksum matches."""
with open(local_file, 'wb') as handle: for chunk in response.iter_content(4096): handle.write(chunk) actual_checksum = md5sum(local_file) if actual_checksum != expected_checksum: logging.error('Checksum mismatch for %r. Expected %r, got %r', local_file, expected_checksum, actual_checksum) return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_symlink(local_file, symlink_path): """Create a relative symbolic link if symlink path is given. Parameters local_file relative path to output folder (includes ./ prefix) of file saved symlink_path relative path to output folder (includes ./ prefix) of symbolic link to be created Returns ------- bool success code """
if symlink_path is not None: if os.path.exists(symlink_path) or os.path.lexists(symlink_path): os.unlink(symlink_path) local_file = os.path.normpath(local_file) symlink_path = os.path.normpath(symlink_path) num_dirs_upward = len(os.path.dirname(symlink_path).split(os.sep)) local_relative_to_symlink = num_dirs_upward * (os.pardir + os.sep) os.symlink(os.path.join(local_relative_to_symlink, local_file), symlink_path) return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_strain_label(entry, viral=False): """Try to extract a strain from an assemly summary entry. First this checks 'infraspecific_name', then 'isolate', then it tries to get it from 'organism_name'. If all fails, it falls back to just returning the assembly accesion number. """
def get_strain(entry): strain = entry['infraspecific_name'] if strain != '': strain = strain.split('=')[-1] return strain strain = entry['isolate'] if strain != '': return strain if len(entry['organism_name'].split(' ')) > 2 and not viral: strain = ' '.join(entry['organism_name'].split(' ')[2:]) return strain return entry['assembly_accession'] def cleanup(strain): strain = strain.strip() strain = strain.replace(' ', '_') strain = strain.replace(';', '_') strain = strain.replace('/', '_') strain = strain.replace('\\', '_') return strain return cleanup(get_strain(entry))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pretty(d, indent=0): """A prettier way to print nested dicts """
for key, value in d.items(): print(' ' * indent + str(key)) if isinstance(value, dict): pretty(value, indent+1) else: sys.stderr.write(' ' * (indent+1) + str(value) + '\n')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def desc_taxa(taxid, ncbi, outFH, just_taxids=False): """Write descendent taxa for taxid """
# Main feature of the script is to get all taxa within a given group. descendent_taxa = ncbi.get_descendant_taxa(taxid) descendent_taxa_names = ncbi.translate_to_names(descendent_taxa) if just_taxids: for taxid in descendent_taxa: outFH.write(str(taxid) + '\n') else: for dtn, dt in zip(descendent_taxa_names, descendent_taxa): x = [str(x) for x in [taxid, dt, dtn]] outFH.write('\t'.join(x) + '\n')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def taxon_info(taxid, ncbi, outFH): """Write info on taxid """
taxid = int(taxid) tax_name = ncbi.get_taxid_translator([taxid])[taxid] rank = list(ncbi.get_rank([taxid]).values())[0] lineage = ncbi.get_taxid_translator(ncbi.get_lineage(taxid)) lineage = ['{}:{}'.format(k,v) for k,v in lineage.items()] lineage = ';'.join(lineage) x = [str(x) for x in [tax_name, taxid, rank, lineage]] outFH.write('\t'.join(x) + '\n')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def name2taxid(taxids, ncbi): """Converting taxon names to taxids """
new_taxids = [] for taxid in taxids: try: new_taxids.append(ncbi.get_name_translator([taxid])[taxid][0]) except KeyError: try: new_taxids.append(int(taxid)) except ValueError: msg = 'Error: cannot convert to taxid: {}' raise ValueError(msg.format(taxid)) return new_taxids
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """Make queries against NCBI Taxa databases """
# Get commandline args args = get_args() # Instantiate the ete NCBI taxa object ncbi = NCBITaxa(dbfile=args.database) ## dbfile location if args.verbose > 1: sys.stderr.write('Taxa database is stored at {}\n'.format(ncbi.dbfile)) # Update the database if required. if args.update is True: if args.verbose > 1: msg = 'Updating the taxonomy database. This may take several minutes...\n' sys.stderr.write(msg) ncbi.update_taxonomy_database() # If names were provided in taxid list, convert to taxids args.taxid = args.taxid.replace('"', '').replace("'", '').split(',') args.taxid = name2taxid(args.taxid, ncbi) # Output if args.outfile is None: outFH = sys.stdout else: outFH = open(args.outfile, 'w') ## header if args.taxon_info: outFH.write('\t'.join(['name', 'taxid', 'rank', 'lineage']) + '\n') elif not args.just_taxids: outFH.write('\t'.join(['parent_taxid', 'descendent_taxid', 'descendent_name']) + '\n') ## body for taxid in args.taxid: if args.taxon_info: taxon_info(taxid, ncbi, outFH) else: desc_taxa(taxid, ncbi, outFH, args.just_taxids) outFH.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivative(self,x): ''' Evaluates the derivative of the interpolated function at the given input. Parameters ---------- x : np.array or float Real values to be evaluated in the interpolated function. Returns ------- dydx : np.array or float The interpolated function's first derivative evaluated at x: dydx = f'(x), with the same shape as x. ''' z = np.asarray(x) return (self._der(z.flatten())).reshape(z.shape)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def eval_with_derivative(self,x): ''' Evaluates the interpolated function and its derivative at the given input. Parameters ---------- x : np.array or float Real values to be evaluated in the interpolated function. Returns ------- y : np.array or float The interpolated function evaluated at x: y = f(x), with the same shape as x. dydx : np.array or float The interpolated function's first derivative evaluated at x: dydx = f'(x), with the same shape as x. ''' z = np.asarray(x) y, dydx = self._evalAndDer(z.flatten()) return y.reshape(z.shape), dydx.reshape(z.shape)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivative(self,*args): ''' Returns the derivative of the function with respect to the first dimension. ''' if self.i_dim == 0: return np.ones_like(*args[0]) else: return np.zeros_like(*args[0])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivativeX(self,*args): ''' Returns the derivative of the function with respect to the X dimension. This is the first input whenever n_dims < 4 and the second input otherwise. ''' if self.n_dims >= 4: j = 1 else: j = 0 if self.i_dim == j: return np.ones_like(*args[0]) else: return np.zeros_like(*args[0])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivativeW(self,*args): ''' Returns the derivative of the function with respect to the W dimension. This should only exist when n_dims >= 4. ''' if self.n_dims >= 4: j = 0 else: assert False, "Derivative with respect to W can't be called when n_dims < 4!" if self.i_dim == j: return np.ones_like(*args[0]) else: return np.zeros_like(*args[0])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _derY(self,x,y): ''' Returns the derivative with respect to y of the interpolated function at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY. ''' if _isscalar(x): x_pos = max(min(self.xSearchFunc(self.x_list,x),self.x_n-1),1) y_pos = max(min(self.ySearchFunc(self.y_list,y),self.y_n-1),1) else: x_pos = self.xSearchFunc(self.x_list,x) x_pos[x_pos < 1] = 1 x_pos[x_pos > self.x_n-1] = self.x_n-1 y_pos = self.ySearchFunc(self.y_list,y) y_pos[y_pos < 1] = 1 y_pos[y_pos > self.y_n-1] = self.y_n-1 alpha = (x - self.x_list[x_pos-1])/(self.x_list[x_pos] - self.x_list[x_pos-1]) dfdy = ( ((1-alpha)*self.f_values[x_pos-1,y_pos] + alpha*self.f_values[x_pos,y_pos]) - ((1-alpha)*self.f_values[x_pos-1,y_pos-1] + alpha*self.f_values[x_pos,y_pos-1]))/(self.y_list[y_pos] - self.y_list[y_pos-1]) return dfdy
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _derX(self,x,y,z): ''' Returns the derivative with respect to x of the interpolated function at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeX. ''' if _isscalar(x): x_pos = max(min(self.xSearchFunc(self.x_list,x),self.x_n-1),1) y_pos = max(min(self.ySearchFunc(self.y_list,y),self.y_n-1),1) z_pos = max(min(self.zSearchFunc(self.z_list,z),self.z_n-1),1) else: x_pos = self.xSearchFunc(self.x_list,x) x_pos[x_pos < 1] = 1 x_pos[x_pos > self.x_n-1] = self.x_n-1 y_pos = self.ySearchFunc(self.y_list,y) y_pos[y_pos < 1] = 1 y_pos[y_pos > self.y_n-1] = self.y_n-1 z_pos = self.zSearchFunc(self.z_list,z) z_pos[z_pos < 1] = 1 z_pos[z_pos > self.z_n-1] = self.z_n-1 beta = (y - self.y_list[y_pos-1])/(self.y_list[y_pos] - self.y_list[y_pos-1]) gamma = (z - self.z_list[z_pos-1])/(self.z_list[z_pos] - self.z_list[z_pos-1]) dfdx = ( ( (1-beta)*(1-gamma)*self.f_values[x_pos,y_pos-1,z_pos-1] + (1-beta)*gamma*self.f_values[x_pos,y_pos-1,z_pos] + beta*(1-gamma)*self.f_values[x_pos,y_pos,z_pos-1] + beta*gamma*self.f_values[x_pos,y_pos,z_pos]) - ( (1-beta)*(1-gamma)*self.f_values[x_pos-1,y_pos-1,z_pos-1] + (1-beta)*gamma*self.f_values[x_pos-1,y_pos-1,z_pos] + beta*(1-gamma)*self.f_values[x_pos-1,y_pos,z_pos-1] + beta*gamma*self.f_values[x_pos-1,y_pos,z_pos]))/(self.x_list[x_pos] - self.x_list[x_pos-1]) return dfdx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _evalAndDer(self,x): ''' Returns the level and first derivative of the function at each value in x. Only called internally by HARKinterpolator1D.eval_and_der. ''' m = len(x) fx = np.zeros((m,self.funcCount)) for j in range(self.funcCount): fx[:,j] = self.functions[j](x) fx[np.isnan(fx)] = np.inf i = np.argmin(fx,axis=1) y = fx[np.arange(m),i] dydx = np.zeros_like(y) for j in range(self.funcCount): c = i == j dydx[c] = self.functions[j].derivative(x[c]) return y,dydx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivativeX(self,x,y): ''' Evaluate the first derivative with respect to x of the function at given state space points. Parameters ---------- x : np.array First input values. y : np.array Second input values; should be of same shape as x. Returns ------- dfdx_out : np.array First derivative of function with respect to the first input, evaluated at (x,y), of same shape as inputs. ''' xShift = self.lowerBound(y) dfdx_out = self.func.derivativeX(x-xShift,y) return dfdx_out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivativeY(self,x,y): ''' Evaluate the first derivative with respect to y of the function at given state space points. Parameters ---------- x : np.array First input values. y : np.array Second input values; should be of same shape as x. Returns ------- dfdy_out : np.array First derivative of function with respect to the second input, evaluated at (x,y), of same shape as inputs. ''' xShift,xShiftDer = self.lowerBound.eval_with_derivative(y) dfdy_out = self.func.derivativeY(x-xShift,y) - xShiftDer*self.func.derivativeX(x-xShift,y) return dfdy_out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def derivativeZ(self,x,y,z):
    '''
    Evaluate the first derivative with respect to z of the function at
    given state space points.

    Parameters
    ----------
    x : np.array
        First input values.
    y : np.array
        Second input values; should be of same shape as x.
    z : np.array
        Third input values; should be of same shape as x.

    Returns
    -------
    dfdz_out : np.array
        First derivative of function with respect to the third input,
        evaluated at (x,y,z), of same shape as inputs.
    '''
    # The lower bound depends only on y, so df/dz passes straight through
    # to the underlying function at the shifted x.
    shifted_x = x - self.lowerBound(y)
    return self.func.derivativeZ(shifted_x, y, z)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _derY(self,x,y,z):
    '''
    Returns the derivative with respect to y of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeY.
    '''
    if _isscalar(x):
        # Bracket z between adjacent gridpoints, clamped to interior sectors,
        # and linearly blend the two slices' y-derivatives.
        pos = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[pos-1]) / (self.z_list[pos] - self.z_list[pos-1])
        return (1 - wgt) * self.xyInterpolators[pos-1].derivativeY(x, y) \
            + wgt * self.xyInterpolators[pos].derivativeY(x, y)
    # Vectorized case: locate each point's z-sector, clamping to valid range.
    m = len(x)
    pos = np.searchsorted(self.z_list, z)
    pos[pos > self.z_n - 1] = self.z_n - 1
    pos[pos < 1] = 1
    dfdy = np.zeros(m) + np.nan
    if x.size > 0:
        # Process one z-sector at a time, blending slice derivatives.
        for j in range(1, self.z_n):
            here = pos == j
            if np.any(here):
                wgt = (z[here] - self.z_list[j-1]) / (self.z_list[j] - self.z_list[j-1])
                dfdy[here] = (1 - wgt) * self.xyInterpolators[j-1].derivativeY(x[here], y[here]) \
                    + wgt * self.xyInterpolators[j].derivativeY(x[here], y[here])
    return dfdy
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _derY(self,x,y):
    '''
    Returns the derivative with respect to y of the interpolated function
    at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.
    '''
    # Locate each point's grid sector and its local (alpha,beta) coordinates.
    x_pos, y_pos = self.findSector(x,y)
    alpha, beta = self.findCoords(x,y,x_pos,y_pos)

    # Get four corners data for each point
    xA = self.x_values[x_pos,y_pos]
    xB = self.x_values[x_pos+1,y_pos]
    xC = self.x_values[x_pos,y_pos+1]
    xD = self.x_values[x_pos+1,y_pos+1]
    yA = self.y_values[x_pos,y_pos]
    yB = self.y_values[x_pos+1,y_pos]
    yC = self.y_values[x_pos,y_pos+1]
    yD = self.y_values[x_pos+1,y_pos+1]
    fA = self.f_values[x_pos,y_pos]
    fB = self.f_values[x_pos+1,y_pos]
    fC = self.f_values[x_pos,y_pos+1]
    fD = self.f_values[x_pos+1,y_pos+1]

    # Calculate components of the alpha,beta --> x,y delta translation matrix
    alpha_x = (1-beta)*(xB-xA) + beta*(xD-xC)
    alpha_y = (1-beta)*(yB-yA) + beta*(yD-yC)
    beta_x = (1-alpha)*(xC-xA) + alpha*(xD-xB)
    beta_y = (1-alpha)*(yC-yA) + alpha*(yD-yB)

    # Invert the delta translation matrix into x,y --> alpha,beta
    # (only the y-row of the inverse is needed for df/dy)
    det = alpha_x*beta_y - beta_x*alpha_y
    y_alpha = -beta_x/det
    y_beta = alpha_x/det

    # Calculate the derivative of f w.r.t. alpha and beta
    dfda = (1-beta)*(fB-fA) + beta*(fD-fC)
    dfdb = (1-alpha)*(fC-fA) + alpha*(fD-fB)

    # Calculate the derivative with respect to y (and return it)
    dfdy = y_alpha*dfda + y_beta*dfdb
    return dfdy
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def distanceMetric(thing_A,thing_B):
    '''
    A "universal distance" metric that can be used as a default in many settings.

    Parameters
    ----------
    thing_A : object
        A generic object.
    thing_B : object
        Another generic object.

    Returns:
    ------------
    distance : float
        The "distance" between thing_A and thing_B.
    '''
    # Get the types of the two inputs
    typeA = type(thing_A)
    typeB = type(thing_B)

    if typeA is list and typeB is list:
        lenA = len(thing_A)  # If both inputs are lists, then the distance between
        lenB = len(thing_B)  # them is the maximum distance between corresponding
        if lenA == lenB:     # elements in the lists.  If they differ in length,
            distance_temp = []  # the distance is the difference in lengths.
            for n in range(lenA):
                distance_temp.append(distanceMetric(thing_A[n],thing_B[n]))
            # BUG FIX: two empty lists previously crashed on max([]);
            # they are identical, so their distance is zero.
            distance = max(distance_temp) if distance_temp else 0.0
        else:
            distance = float(abs(lenA - lenB))
    # If both inputs are numbers, return their difference.
    # BUG FIX: the original condition tested typeB twice
    # ("typeA is int or typeB is float"), so a float-vs-int comparison fell
    # through to the failsafe and returned 1000.0.
    elif (typeA is int or typeA is float) and (typeB is int or typeB is float):
        distance = float(abs(thing_A - thing_B))
    # If both inputs are array-like, return the maximum absolute difference b/w
    # corresponding elements (if same shape); return largest difference in dimensions
    # if shapes do not align.
    elif hasattr(thing_A,'shape') and hasattr(thing_B,'shape'):
        if thing_A.shape == thing_B.shape:
            distance = np.max(abs(thing_A - thing_B))
        else:
            # BUG FIX: shapes are tuples, which do not support subtraction;
            # convert them to arrays before differencing.
            distance = np.max(abs(np.array(thing_A.shape) - np.array(thing_B.shape)))
    # If none of the above cases, but the objects are of the same class, call
    # the distance method of one on the other
    elif thing_A.__class__.__name__ == thing_B.__class__.__name__:
        if thing_A.__class__.__name__ == 'function':
            distance = 0.0
        else:
            distance = thing_A.distance(thing_B)
    else: # Failsafe: the inputs are very far apart
        distance = 1000.0
    return distance
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def distance(self,other):
    '''
    A generic distance method, which requires the existence of an attribute
    called distance_criteria, giving a list of strings naming the attributes
    to be considered by the distance metric.

    Parameters
    ----------
    other : object
        Another object to compare this instance to.

    Returns
    -------
    (unnamed) : float
        The distance between this object and another, using the "universal
        distance" metric.
    '''
    worst = 0.0
    for attr_name in self.distance_criteria:
        try:
            mine = getattr(self, attr_name)
            theirs = getattr(other, attr_name)
            worst = max(worst, distanceMetric(mine, theirs))
        except:
            # If either object lacks the attribute, they are not the same.
            worst = max(worst, 1000.0)
    return worst
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def assignParameters(self,**kwds):
    '''
    Assign an arbitrary number of attributes to this agent.

    Parameters
    ----------
    **kwds : keyword arguments
        Any number of keyword arguments of the form key=value.  Each value
        will be assigned to the attribute named in self.

    Returns
    -------
    none
    '''
    # Bind each keyword argument as an attribute of this instance.
    for key, value in kwds.items():
        setattr(self, key, value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def getAvg(self,varname,**kwds):
    '''
    Calculates the average of an attribute of this instance.  Returns NaN if no such attribute.

    Parameters
    ----------
    varname : string
        The name of the attribute whose average is to be calculated.  This attribute must be an
        np.array or other class compatible with np.mean.

    Returns
    -------
    avg : float or np.array
        The average of this attribute.  Might be an array if the axis keyword is passed.
    '''
    # Missing attribute yields NaN rather than raising.
    if not hasattr(self, varname):
        return np.nan
    return np.mean(getattr(self, varname), **kwds)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def timeFlip(self):
    '''
    Reverse the flow of time for this instance.

    Parameters
    ----------
    none

    Returns
    -------
    none
    '''
    # IMPROVEMENT: the original used exec('self.' + name + '.reverse()'),
    # which is fragile and unsafe; getattr does the same attribute lookup
    # directly and reverses each time-varying list in place.
    for name in self.time_vary:
        getattr(self, name).reverse()
    self.time_flow = not self.time_flow
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def addToTimeVary(self,*params):
    '''
    Adds any number of parameters to time_vary for this instance.

    Parameters
    ----------
    params : string
        Any number of strings naming attributes to be added to time_vary

    Returns
    -------
    None
    '''
    # Append only names not already listed, preserving insertion order.
    for name in params:
        if name not in self.time_vary:
            self.time_vary.append(name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def addToTimeInv(self,*params):
    '''
    Adds any number of parameters to time_inv for this instance.

    Parameters
    ----------
    params : string
        Any number of strings naming attributes to be added to time_inv

    Returns
    -------
    None
    '''
    # Append only names not already listed, preserving insertion order.
    for name in params:
        if name not in self.time_inv:
            self.time_inv.append(name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def delFromTimeVary(self,*params):
    '''
    Removes any number of parameters from time_vary for this instance.

    Parameters
    ----------
    params : string
        Any number of strings naming attributes to be removed from time_vary

    Returns
    -------
    None
    '''
    # Silently ignore names that are not currently in time_vary.
    for name in params:
        if name in self.time_vary:
            self.time_vary.remove(name)