text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def read_buf(self):
    """Read the database file at ``self.filepath`` and return its bytes.

    Raises:
        KPError: if the file is shorter than the 124-byte KDB header.
    """
    # The original wrapped this in ``try: ... except: raise`` which is a
    # no-op (it re-raises everything unchanged) -- removed.
    with open(self.filepath, 'rb') as handler:
        buf = handler.read()
    # There should be a header at least
    if len(buf) < 124:
        raise KPError('Unexpected file size. It should be more or'
                      'equal 124 bytes but it is '
                      '{0}!'.format(len(buf)))
    return buf
def close(self):
    """This method closes the database correctly.

    Removes the lock file (if present), clears the path/read-only state
    and locks the in-memory database.
    """
    if self.filepath is None:
        raise KPError('Can\'t close a not opened file')
    lockfile = self.filepath + '.lock'
    if path.isfile(lockfile):
        remove(lockfile)
    self.filepath = None
    self.read_only = False
    self.lock()
    return True
def lock(self):
    """This method locks the database.

    Drops the credentials and all in-memory groups/entries, leaving an
    empty root group behind.
    """
    self.password = None
    self.keyfile = None
    # Clear the shared lists in place so external references stay valid.
    del self.groups[:]
    del self.entries[:]
    del self._group_order[:]
    del self._entry_order[:]
    self.root_group = v1Group()
    self._num_groups = 1
    self._num_entries = 0
    return True
def unlock(self, password=None, keyfile=None, buf=None):
    """Unlock the database. masterkey is needed."""
    no_password = password is None or password == ""
    no_keyfile = keyfile is None or keyfile == ""
    if no_password and no_keyfile:
        raise KPError("A password/keyfile is needed")
    if ((password is not None and type(password) is not str) or
            (keyfile is not None and type(keyfile) is not str)):
        raise KPError("password/keyfile must be a string.")
    # Normalize empty strings to None before storing the credentials.
    self.keyfile = None if keyfile == "" else keyfile
    self.password = None if password == "" else password
    return self.load(buf)
def remove_group(self, group=None):
    """This method removes a group.

    The group needed to remove the group.

    group must be a v1Group.
    """
    if group is None:
        raise KPError("Need group to remove a group")
    if type(group) is not v1Group:
        raise KPError("group must be v1Group")
    if group not in self.groups:
        raise KPError("Given group doesn't exist")
    # Copy children and entries first: removing the group mutates them.
    children = list(group.children)
    entries = list(group.entries)
    group.parent.children.remove(group)
    self.groups.remove(group)
    self._num_groups -= 1
    # Recursively drop the whole subtree and its entries.
    for child in children:
        self.remove_group(child)
    for entry in entries:
        self.remove_entry(entry)
    return True
def move_group(self, group=None, parent=None):
    """Append group to a new parent.

    group and parent must be v1Group-instances.
    """
    if group is None or type(group) is not v1Group:
        raise KPError("A valid group must be given.")
    if parent is not None and type(parent) is not v1Group:
        raise KPError("parent must be a v1Group.")
    if group is parent:
        raise KPError("group and parent must not be the same group")
    if parent is None:
        parent = self.root_group
    if group not in self.groups:
        raise KPError("Didn't find given group.")
    self.groups.remove(group)
    group.parent.children.remove(group)
    group.parent = parent
    # Keep self.groups in pre-order: place the group right after the
    # new parent's existing subtree.
    if parent.children:
        if parent.children[-1] is self.groups[-1]:
            self.groups.append(group)
        else:
            after = self.groups.index(parent.children[-1]) + 1
            self.groups.insert(after, group)
    else:
        self.groups.insert(self.groups.index(parent) + 1, group)
    parent.children.append(group)
    group.level = 0 if parent is self.root_group else parent.level + 1
    if group.children:
        self._move_group_helper(group)
    group.last_mod = datetime.now().replace(microsecond=0)
    return True
def move_group_in_parent(self, group=None, index=None):
    """Move group to another position in group's parent.

    index must be a valid index of group.parent.groups
    """
    if group is None or index is None:
        raise KPError("group and index must be set")
    elif type(group) is not v1Group or type(index) is not int:
        raise KPError("group must be a v1Group-instance and index "
                      "must be an integer.")
    elif group not in self.groups:
        raise KPError("Given group doesn't exist")
    elif index < 0 or index >= len(group.parent.children):
        # BUG FIX: message previously read "valid index if ..."
        raise KPError("index must be a valid index of group.parent.groups")
    else:
        # Swap the group with the one currently at the target index,
        # both in the parent's child list and in the flat group list.
        group_at_index = group.parent.children[index]
        pos_in_parent = group.parent.children.index(group)
        pos_in_groups = self.groups.index(group)
        pos_in_groups2 = self.groups.index(group_at_index)
        group.parent.children[index] = group
        group.parent.children[pos_in_parent] = group_at_index
        self.groups[pos_in_groups2] = group
        self.groups[pos_in_groups] = group_at_index
        # Re-seat both subtrees so self.groups stays in pre-order.
        if group.children:
            self._move_group_helper(group)
        if group_at_index.children:
            self._move_group_helper(group_at_index)
        group.last_mod = datetime.now().replace(microsecond=0)
        return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _move_group_helper(self, group): """A helper to move the chidren of a group."""
for i in group.children: self.groups.remove(i) i.level = group.level + 1 self.groups.insert(self.groups.index(group) + 1, i) if i.children: self._move_group_helper(i)
def create_entry(self, group=None, title="", image=1, url="",
                 username="", password="", comment="",
                 y=2999, mon=12, d=28, h=23, min_=59, s=59):
    """This method creates a new entry.

    The group which should hold the entry is needed.

    image must be an unsigned int >0, group a v1Group.

    It is possible to give an expire date in the following way:
        - y is the year between 1 and 9999 inclusive
        - mon is the month between 1 and 12
        - d is a day in the given month
        - h is a hour between 0 and 23
        - min_ is a minute between 0 and 59
        - s is a second between 0 and 59

    The special date 2999-12-28 23:59:59 means that entry expires never.
    """
    if (type(title) is not str or
            type(image) is not int or image < 0 or
            type(url) is not str or
            type(username) is not str or
            type(password) is not str or
            type(comment) is not str or
            type(y) is not int or
            type(mon) is not int or
            type(d) is not int or
            type(h) is not int or
            type(min_) is not int or
            type(s) is not int or
            type(group) is not v1Group):
        raise KPError("One argument has not a valid type.")
    elif group not in self.groups:
        raise KPError("Group doesn't exist.")
    elif (y > 9999 or y < 1 or mon > 12 or mon < 1 or d > 31 or d < 1 or
          h > 23 or h < 0 or min_ > 59 or min_ < 0 or s > 59 or s < 0):
        raise KPError("No legal date")
    # BUG FIX: the hand-written month table rejected Feb 29 even in leap
    # years.  Let datetime validate the day-in-month instead.
    try:
        expire = datetime(y, mon, d, h, min_, s)
    except ValueError:
        raise KPError("Given day doesn't exist in given month")
    Random.atfork()
    uuid = Random.get_random_bytes(16)
    now = datetime.now().replace(microsecond=0)
    entry = v1Entry(group.id_, group, image, title, url, username,
                    password, comment, now, now, now, expire, uuid)
    self.entries.append(entry)
    group.entries.append(entry)
    self._num_entries += 1
    return True
def remove_entry(self, entry=None):
    """This method can remove entries.

    The v1Entry-object entry is needed.
    """
    if entry is None or type(entry) is not v1Entry:
        raise KPError("Need an entry.")
    if entry not in self.entries:
        raise KPError("Given entry doesn't exist.")
    entry.group.entries.remove(entry)
    self.entries.remove(entry)
    self._num_entries -= 1
    return True
def move_entry(self, entry=None, group=None):
    """Move an entry to another group.

    A v1Group group and a v1Entry entry are needed.
    """
    if (entry is None or group is None or type(entry) is not v1Entry or
            type(group) is not v1Group):
        raise KPError("Need an entry and a group.")
    if entry not in self.entries:
        raise KPError("No entry found.")
    if group not in self.groups:
        raise KPError("No group found.")
    entry.group.entries.remove(entry)
    group.entries.append(entry)
    entry.group_id = group.id_
    entry.group = group
    return True
def move_entry_in_group(self, entry=None, index=None):
    """Move entry to another position inside a group.

    An entry and a valid index to insert the entry in the entry list
    of the holding group is needed. 0 means that the entry is moved
    to the first position 1 to the second and so on.
    """
    if (entry is None or index is None or type(entry) is not v1Entry or
            type(index) is not int):
        raise KPError("Need an entry and an index.")
    if index < 0 or index > len(entry.group.entries) - 1:
        raise KPError("Index is not valid.")
    if entry not in self.entries:
        raise KPError("Entry not found.")
    # Swap the entry with whatever currently sits at the target index,
    # both inside its group and in the database-wide entry list.
    pos_in_group = entry.group.entries.index(entry)
    pos_in_entries = self.entries.index(entry)
    entry_at_index = entry.group.entries[index]
    pos_in_entries2 = self.entries.index(entry_at_index)
    entry.group.entries[index] = entry
    entry.group.entries[pos_in_group] = entry_at_index
    self.entries[pos_in_entries2] = entry
    self.entries[pos_in_entries] = entry_at_index
    return True
def _transform_key(self, masterkey):
    """This method creates the key to decrypt the database"""
    cipher = AES.new(self._transf_randomseed, AES.MODE_ECB)
    # Encrypt the created hash repeatedly (key stretching)
    for _ in range(self._key_transf_rounds):
        masterkey = cipher.encrypt(masterkey)
    # Finally, hash it again...
    digest = SHA256.new()
    digest.update(masterkey)
    masterkey = digest.digest()
    # ...and hash the result together with the randomseed
    digest = SHA256.new()
    digest.update(self._final_randomseed + masterkey)
    return digest.digest()
def _get_filekey(self):
    """This method creates a key from a keyfile.

    A 32-byte file is used verbatim, a 64-byte file is interpreted as a
    hex-encoded key, and any other file (or a 64-byte file that is not
    valid hex) is hashed with SHA-256.

    Raises:
        KPError: if the keyfile does not exist or cannot be read.
    """
    if not os.path.exists(self.keyfile):
        raise KPError('Keyfile not exists.')
    try:
        with open(self.keyfile, 'rb') as handler:
            handler.seek(0, os.SEEK_END)
            size = handler.tell()
            handler.seek(0, os.SEEK_SET)
            if size == 32:
                return handler.read(32)
            if size == 64:
                try:
                    return binascii.unhexlify(handler.read(64))
                except (TypeError, binascii.Error):
                    # Not hex after all -- rewind and hash it below.
                    handler.seek(0, os.SEEK_SET)
            # BUG FIX: files of any other size previously fell through
            # and returned None; hash the whole file in chunks instead
            # (matches upstream kppy behaviour).
            sha = SHA256.new()
            while True:
                buf = handler.read(2048)
                sha.update(buf)
                if len(buf) < 2048:
                    break
            return sha.digest()
    except IOError as e:
        raise KPError('Could not read file: %s' % e)
def _cbc_decrypt(self, final_key, crypted_content):
    """This method decrypts the database"""
    # Just decrypt the content with the created key
    aes = AES.new(final_key, AES.MODE_CBC, self._enc_iv)
    decrypted_content = aes.decrypt(crypted_content)
    # Strip the PKCS#7-style padding.  Indexing bytes yields an int on
    # Python 3 and a 1-char str on Python 2.  (BUG FIX: the original
    # assigned the Python-3 value once unconditionally and then again
    # inside the if -- dead code; it also compared sys.version as a
    # string, which version_info avoids.)
    if sys.version_info[0] >= 3:
        padding = decrypted_content[-1]
    else:
        padding = ord(decrypted_content[-1])
    return decrypted_content[:len(decrypted_content) - padding]
def _cbc_encrypt(self, content, final_key):
    """This method encrypts the content."""
    aes = AES.new(final_key, AES.MODE_CBC, self._enc_iv)
    # Pad to a multiple of the AES block size; each pad byte holds the
    # pad length (PKCS#7 style).
    pad_len = 16 - len(content) % AES.block_size
    padded = content + chr(pad_len).encode() * pad_len
    return aes.encrypt(bytes(padded))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _read_group_field(self, group, levels, field_type, field_size, decrypted_content): """This method handles the different fields of a group"""
if field_type == 0x0000: # Ignored (commentar block) pass elif field_type == 0x0001: group.id_ = struct.unpack('<I', decrypted_content[:4])[0] elif field_type == 0x0002: try: group.title = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') except UnicodeDecodeError: group.title = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('latin-1') decrypted_content = decrypted_content[1:] elif field_type == 0x0003: group.creation = self._get_date(decrypted_content) elif field_type == 0x0004: group.last_mod = self._get_date(decrypted_content) elif field_type == 0x0005: group.last_access = self._get_date(decrypted_content) elif field_type == 0x0006: group.expire = self._get_date(decrypted_content) elif field_type == 0x0007: group.image = struct.unpack('<I', decrypted_content[:4])[0] elif field_type == 0x0008: level = struct.unpack('<H', decrypted_content[:2])[0] group.level = level levels.append(level) elif field_type == 0x0009: group.flags = struct.unpack('<I', decrypted_content[:4])[0] elif field_type == 0xFFFF: pass else: return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _read_entry_field(self, entry, field_type, field_size, decrypted_content): """This method handles the different fields of an entry"""
if field_type == 0x0000: # Ignored pass elif field_type == 0x0001: entry.uuid = decrypted_content[:16] elif field_type == 0x0002: entry.group_id = struct.unpack('<I', decrypted_content[:4])[0] elif field_type == 0x0003: entry.image = struct.unpack('<I', decrypted_content[:4])[0] elif field_type == 0x0004: entry.title = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') decrypted_content = decrypted_content[1:] elif field_type == 0x0005: entry.url = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') decrypted_content = decrypted_content[1:] elif field_type == 0x0006: entry.username = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') decrypted_content = decrypted_content[1:] elif field_type == 0x0007: entry.password = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') elif field_type == 0x0008: entry.comment = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') elif field_type == 0x0009: entry.creation = self._get_date(decrypted_content) elif field_type == 0x000A: entry.last_mod = self._get_date(decrypted_content) elif field_type == 0x000B: entry.last_access = self._get_date(decrypted_content) elif field_type == 0x000C: entry.expire = self._get_date(decrypted_content) elif field_type == 0x000D: entry.binary_desc = struct.unpack('<{0}s'.format(field_size-1), decrypted_content[:field_size-1])[0].decode('utf-8') elif field_type == 0x000E: entry.binary = decrypted_content[:field_size] elif field_type == 0xFFFF: pass else: return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_date(self, decrypted_content): """This method is used to decode the packed dates of entries"""
# Just copied from original KeePassX source date_field = struct.unpack('<5B', decrypted_content[:5]) dw1 = date_field[0] dw2 = date_field[1] dw3 = date_field[2] dw4 = date_field[3] dw5 = date_field[4] y = (dw1 << 6) | (dw2 >> 2) mon = ((dw2 & 0x03) << 2) | (dw3 >> 6) d = (dw3 >> 1) & 0x1F h = ((dw3 & 0x01) << 4) | (dw4 >> 4) min_ = ((dw4 & 0x0F) << 2) | (dw5 >> 6) s = dw5 & 0x3F return datetime(y, mon, d, h, min_, s)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pack_date(self, date): """This method is used to encode dates"""
# Just copied from original KeePassX source y, mon, d, h, min_, s = date.timetuple()[:6] dw1 = 0x0000FFFF & ((y>>6) & 0x0000003F) dw2 = 0x0000FFFF & ((y & 0x0000003F)<<2 | ((mon>>2) & 0x00000003)) dw3 = 0x0000FFFF & (((mon & 0x0000003)<<6) | ((d & 0x0000001F)<<1) \ | ((h>>4) & 0x00000001)) dw4 = 0x0000FFFF & (((h & 0x0000000F)<<4) | ((min_>>2) & 0x0000000F)) dw5 = 0x0000FFFF & (((min_ & 0x00000003)<<6) | (s & 0x0000003F)) return struct.pack('<5B', dw1, dw2, dw3, dw4, dw5)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_group_tree(self, levels): """This method creates a group tree"""
if levels[0] != 0: raise KPError("Invalid group tree") for i in range(len(self.groups)): if(levels[i] == 0): self.groups[i].parent = self.root_group self.groups[i].index = len(self.root_group.children) self.root_group.children.append(self.groups[i]) continue j = i-1 while j >= 0: if levels[j] < levels[i]: if levels[i]-levels[j] != 1: raise KPError("Invalid group tree") self.groups[i].parent = self.groups[j] self.groups[i].index = len(self.groups[j].children) self.groups[i].parent.children.append(self.groups[i]) break if j == 0: raise KPError("Invalid group tree") j -= 1 for e in range(len(self.entries)): for g in range(len(self.groups)): if self.entries[e].group_id == self.groups[g].id_: self.groups[g].entries.append(self.entries[e]) self.entries[e].group = self.groups[g] # from original KeePassX-code, but what does it do? self.entries[e].index = 0 return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _save_group_field(self, field_type, group): """This method packs a group field"""
if field_type == 0x0000: # Ignored (commentar block) pass elif field_type == 0x0001: if group.id_ is not None: return (4, struct.pack('<I', group.id_)) elif field_type == 0x0002: if group.title is not None: return (len(group.title.encode())+1, (group.title+'\0').encode()) elif field_type == 0x0003: if group.creation is not None: return (5, self._pack_date(group.creation)) elif field_type == 0x0004: if group.last_mod is not None: return (5, self._pack_date(group.last_mod)) elif field_type == 0x0005: if group.last_access is not None: return (5, self._pack_date(group.last_access)) elif field_type == 0x0006: if group.expire is not None: return (5, self._pack_date(group.expire)) elif field_type == 0x0007: if group.image is not None: return (4, struct.pack('<I', group.image)) elif field_type == 0x0008: if group.level is not None: return (2, struct.pack('<H', group.level)) elif field_type == 0x0009: if group.flags is not None: return (4, struct.pack('<I', group.flags)) return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _save_entry_field(self, field_type, entry): """This group packs a entry field"""
if field_type == 0x0000: # Ignored pass elif field_type == 0x0001: if entry.uuid is not None: return (16, entry.uuid) elif field_type == 0x0002: if entry.group_id is not None: return (4, struct.pack('<I', entry.group_id)) elif field_type == 0x0003: if entry.image is not None: return (4, struct.pack('<I', entry.image)) elif field_type == 0x0004: if entry.title is not None: return (len(entry.title.encode())+1, (entry.title+'\0').encode()) elif field_type == 0x0005: if entry.url is not None: return (len(entry.url.encode())+1, (entry.url+'\0').encode()) elif field_type == 0x0006: if entry.username is not None: return (len(entry.username.encode())+1, (entry.username+'\0').encode()) elif field_type == 0x0007: if entry.password is not None: return (len(entry.password.encode())+1, (entry.password+'\0').encode()) elif field_type == 0x0008: if entry.comment is not None: return (len(entry.comment.encode())+1, (entry.comment+'\0').encode()) elif field_type == 0x0009: if entry.creation is not None: return (5, self._pack_date(entry.creation)) elif field_type == 0x000A: if entry.last_mod is not None: return (5, self._pack_date(entry.last_mod)) elif field_type == 0x000B: if entry.last_access is not None: return (5, self._pack_date(entry.last_access)) elif field_type == 0x000C: if entry.expire is not None: return (5, self._pack_date(entry.expire)) elif field_type == 0x000D: if entry.binary_desc is not None: return (len(entry.binary_desc.encode())+1, (entry.binary_desc+'\0').encode()) elif field_type == 0x000E: if entry.binary is not None: return (len(entry.binary), entry.binary) return False
def getsecret(self, section, option, **kwargs):
    """Get a secret from Custodia
    """
    # keyword-only arguments; vars and fallback are passed straight
    # through to get().
    value = self.get(section, option, **kwargs)
    if kwargs.get('raw', False):
        return value
    return self.custodia_client.get_secret(value)
def _load_plugin_class(menu, name):
    """Load Custodia plugin

    Entry points are preferred over dotted import path.
    """
    group = 'custodia.{}'.format(menu)
    eps = list(pkg_resources.iter_entry_points(group, name))
    if len(eps) > 1:
        raise ValueError(
            "Multiple entry points for {} {}: {}".format(menu, name, eps))
    if len(eps) == 1:
        entry_point = eps[0]
        # backwards compatibility with old setuptools
        if hasattr(entry_point, 'resolve'):
            return entry_point.resolve()
        return entry_point.load(require=False)
    if '.' in name:
        # fall back to old style dotted name
        module_name, class_name = name.rsplit('.', 1)
        module = importlib.import_module(module_name)
        return getattr(module, class_name)
    raise ValueError("{}: {} not found".format(menu, name))
def _load_plugins(config, cfgparser):
    """Load and initialize plugins
    """
    # set umask before any plugin gets a chance to create a file
    os.umask(config['umask'])
    for section in cfgparser.sections():
        if section in {'ENV', 'global'}:
            # ENV section is only used for interpolation
            continue
        # Section name prefix selects the plugin menu.
        if section.startswith('/'):
            menu = 'consumers'
            path_chain = section.split('/')
            if path_chain[-1] == '':
                path_chain = path_chain[:-1]
            name = tuple(path_chain)
        elif section.startswith('auth:'):
            menu = 'authenticators'
            name = section[5:]
        elif section.startswith('authz:'):
            menu = 'authorizers'
            name = section[6:]
        elif section.startswith('store:'):
            menu = 'stores'
            name = section[6:]
        else:
            raise ValueError('Invalid section name [%s].\n' % section)
        try:
            config[menu][name] = _create_plugin(cfgparser, section, menu)
        except Exception as e:
            logger.debug("Plugin '%s' failed to load.", name, exc_info=True)
            raise RuntimeError(menu, name, e)
    # 2nd initialization stage
    for menu in ['authenticators', 'authorizers', 'consumers', 'stores']:
        plugins = config[menu]
        for name in sorted(plugins):
            plugins[name].finalize_init(config, cfgparser, context=None)
def get(self, po):
    """Lookup value for a PluginOption instance

    Args:
        po: PluginOption

    Returns:
        converted value
    """
    name = po.name
    typ = po.typ
    default = po.default
    # Each option type has a dedicated _get_<typ> converter.
    handler = getattr(self, '_get_{}'.format(typ), None)
    if handler is None:
        raise ValueError(typ)
    self.seen.add(name)
    # pylint: disable=not-callable
    if not self.parser.has_option(self.section, name):
        if default is REQUIRED:
            raise NameError(self.section, name)
        if isinstance(default, INHERIT_GLOBAL):
            return handler('global', name, default.default)
        # don't return default here, give the handler a chance to modify
        # the default, e.g. pw_uid with default='root' returns 0.
    return handler(self.section, name, default)
def parse(self, msg, name):
    """Parses a simple message

    :param msg: the json-decoded value
    :param name: the requested name

    :raises UnknownMessageType: if the type is not 'simple'
    :raises InvalidMessage: if the message cannot be parsed or validated
    """
    # On requests we imply 'simple' if there is no input message
    if msg is None:
        return
    if not isinstance(msg, string_types):
        raise InvalidMessage("The 'value' attribute is not a string")
    self.name, self.payload, self.msg_type = name, msg, 'simple'
def krb5_unparse_principal_name(name):
    """Split a Kerberos principal name into parts

    Returns:
        * ('host', hostname, realm) for a host principal
        * (servicename, hostname, realm) for a service principal
        * (None, username, realm) for a user principal

    :param text name: Kerberos principal name
    :return: (service, host, realm) or (None, username, realm)
    """
    prefix, realm = name.split(u'@')
    if u'/' not in prefix:
        # user principal: no service component
        return None, prefix, realm
    service, host = prefix.rsplit(u'/', 1)
    return service, host, realm
def parse(self, msg, name):
    """Parses the message.

    We check that the message is properly formatted.

    :param msg: a json-encoded value containing a JWS or JWE+JWS token

    :raises InvalidMessage: if the message cannot be parsed or validated

    :returns: A verified payload
    """
    try:
        outer = JWT(jwt=msg)
    except Exception as e:
        raise InvalidMessage('Failed to parse message: %s' % str(e))
    try:
        token = outer.token
        if isinstance(token, JWE):
            token.decrypt(self.kkstore.server_keys[KEY_USAGE_ENC])
            # If an encrypted payload is received then there must be
            # a nested signed payload to verify the provenance.
            nested = token.payload.decode('utf-8')
            token = JWS()
            token.deserialize(nested)
        elif isinstance(token, JWS):
            pass
        else:
            raise TypeError("Invalid Token type: %s" % type(outer))
        # Retrieve client keys for later use
        self.client_keys = [
            JWK(**self._get_key(token.jose_header, KEY_USAGE_SIG)),
            JWK(**self._get_key(token.jose_header, KEY_USAGE_ENC))]
        # verify token and get payload
        token.verify(self.client_keys[KEY_USAGE_SIG])
        claims = json_decode(token.payload)
    except Exception as e:
        logger.debug('Failed to validate message', exc_info=True)
        raise InvalidMessage('Failed to validate message: %s' % str(e))
    check_kem_claims(claims, name)
    self.name = name
    self.payload = claims.get('value')
    self.msg_type = 'kem'
    return {'type': self.msg_type,
            'value': {'kid': self.client_keys[KEY_USAGE_ENC].key_id,
                      'claims': claims}}
def instance_name(string):
    """Check for valid instance name
    """
    # These characters would break section names / URL routing.
    if set(string) & set(':/@'):
        raise argparse.ArgumentTypeError(
            'Invalid instance name {}'.format(string))
    return string
def copy_magic_into_pyc(input_pyc, output_pyc, src_version,
                        dest_version):
    """Bytecodes are the same except the magic number, so just change that"""
    (version, timestamp, magic_int,
     co, is_pypy, source_size) = load_module(input_pyc)
    assert version == float(src_version), (
        "Need Python %s bytecode; got bytecode for version %s" %
        (src_version, version))
    # Rewrite the file with the destination version's magic number.
    write_bytecode_file(output_pyc, co, magic2int(magics[dest_version]))
    print("Wrote %s" % output_pyc)
def transform_26_27(inst, new_inst, i, n, offset, instructions, new_asm):
    """Change JUMP_IF_FALSE and JUMP_IF_TRUE to
    POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE"""
    if inst.opname in ('JUMP_IF_FALSE', 'JUMP_IF_TRUE'):
        # 2.6 emits JUMP_IF_xxx followed by POP_TOP; 2.7 fuses both
        # into a single POP_JUMP_IF_xxx instruction.
        i += 1
        assert i < n
        assert instructions[i].opname == 'POP_TOP'
        new_inst.offset = offset
        if inst.opname == 'JUMP_IF_FALSE':
            new_inst.opname = 'POP_JUMP_IF_FALSE'
        else:
            new_inst.opname = 'POP_JUMP_IF_TRUE'
        new_asm.backpatch[-1].remove(inst)
        # Convert the relative target to an absolute label (+3 bytes
        # for this instruction itself).
        new_inst.arg = 'L%d' % (inst.offset + inst.arg + 3)
        new_asm.backpatch[-1].add(new_inst)
    else:
        xlate26_27(new_inst)
    return xdis.op_size(new_inst.opcode, opcode_27)
def transform_32_33(inst, new_inst, i, n, offset, instructions, new_asm):
    """MAKEFUNCTION adds another const. probably MAKECLASS as well
    """
    add_size = xdis.op_size(new_inst.opcode, opcode_33)
    if inst.opname not in ('MAKE_FUNCTION', 'MAKE_CLOSURE'):
        return add_size
    # Previous instruction should be a load const which
    # contains the name of the function to call
    prev_inst = instructions[i - 1]
    assert prev_inst.opname == 'LOAD_CONST'
    assert isinstance(prev_inst.arg, int)
    # Add the function name as an additional LOAD_CONST
    load_fn_const = Instruction()
    load_fn_const.opname = 'LOAD_CONST'
    load_fn_const.opcode = opcode_33.opmap['LOAD_CONST']
    load_fn_const.line_no = None
    prev_const = new_asm.code.co_consts[prev_inst.arg]
    if hasattr(prev_const, 'co_name'):
        fn_name = new_asm.code.co_consts[prev_inst.arg].co_name
    else:
        fn_name = 'what-is-up'
    const_index = len(new_asm.code.co_consts)
    new_asm.code.co_consts = list(new_asm.code.co_consts)
    new_asm.code.co_consts.append(fn_name)
    load_fn_const.arg = const_index
    load_fn_const.offset = offset
    load_fn_const.starts_line = False
    load_fn_const.is_jump_target = False
    new_asm.code.instructions.append(load_fn_const)
    # Account for the extra LOAD_CONST we just emitted.
    add_size += xdis.op_size(load_fn_const.opcode, opcode_33)
    new_inst.offset = offset + add_size
    return add_size
def transform_33_32(inst, new_inst, i, n, offset, instructions, new_asm):
    """MAKE_FUNCTION, and MAKE_CLOSURE have an additional LOAD_CONST of a name
    that are not in Python 3.2. Remove these.
    """
    add_size = xdis.op_size(new_inst.opcode, opcode_33)
    if inst.opname in ('MAKE_FUNCTION', 'MAKE_CLOSURE'):
        # In 3.3 the MAKE_* op is preceded by LOAD_CONST <qualname>,
        # which is itself preceded by LOAD_CONST <code object>.
        assert len(instructions) > 2  # was asserted twice -- deduplicated
        prev_inst = instructions[i - 1]
        assert prev_inst.opname == 'LOAD_CONST'
        assert isinstance(prev_inst.arg, int)
        prev_inst2 = instructions[i - 2]
        assert prev_inst2.opname == 'LOAD_CONST'
        assert isinstance(prev_inst2.arg, int)
        # BUG FIX: the const holding the code object is the one loaded
        # two instructions back (prev_inst2); the original indexed with
        # prev_inst.arg (the qualname string), which has no co_name and
        # would trip the assertion below.
        prev2_const = new_asm.code.co_consts[prev_inst2.arg]
        assert hasattr(prev2_const, 'co_name')
        # Drop the qualname LOAD_CONST emitted by 3.3.
        new_asm.code.instructions = new_asm.code.instructions[:-1]
        load_const_size = xdis.op_size(prev_inst.opcode, opcode_33)
        add_size -= load_const_size
        new_inst.offset = offset - add_size
        return -load_const_size
    return 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(conversion_type, input_pyc, output_pyc): """Convert Python bytecode from one version to another. INPUT_PYC contains the input bytecode path name OUTPUT_PYC contains the output bytecode path name if supplied The --conversion type option specifies what conversion to do. Note: there are a very limited set of conversions currently supported. Help out and write more!"""
# Body of the bytecode-conversion CLI entry point: disassemble the input .pyc
# to a temporary assembly file, transform it, and reassemble to OUTPUT_PYC.
shortname = osp.basename(input_pyc)
if shortname.endswith('.pyc'):
    shortname = shortname[:-4]
src_version = conversion_to_version(conversion_type, is_dest=False)
dest_version = conversion_to_version(conversion_type, is_dest=True)
if output_pyc is None:
    # Default output name encodes the destination version.
    output_pyc = "%s-%s.pyc" % (shortname, dest_version)

if conversion_type in UPWARD_COMPATABLE:
    # No bytecode rewriting needed: just stamp the new magic number.
    copy_magic_into_pyc(input_pyc, output_pyc, src_version, dest_version)
    return
temp_asm = NamedTemporaryFile('w', suffix='.pyasm', prefix=shortname, delete=False)
(filename, co, version,
 timestamp, magic_int) = disassemble_file(input_pyc, temp_asm, asm_format=True)
temp_asm.close()
assert version == float(src_version), (
    "Need Python %s bytecode; got bytecode for version %s" %
    (src_version, version))
asm = asm_file(temp_asm.name)
new_asm = transform_asm(asm, conversion_type, src_version, dest_version)
os.unlink(temp_asm.name)
write_pycfile(output_pyc, new_asm)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate(self, str=None, fpath=None): """generates fingerprints of the input. Either provide `str` to compute fingerprint directly from your string or `fpath` to compute fingerprint from the text of the file. Make sure to have your text decoded in `utf-8` format if you pass the input string. Args: str (Optional(str)): string whose fingerprint is to be computed. fpath (Optional(str)): absolute path of the text file whose fingerprint is to be computed. Returns: List(int): fingerprints of the input. Raises: FingerprintException: If the input string do not meet the requirements of parameters provided for fingerprinting. """
# Fingerprinting pipeline: load/sanitize input, validate parameters, then
# k-grams -> hashes -> winnowed fingerprints.
# NOTE(review): the parameter `str` shadows the builtin; renaming would change
# the public keyword-argument name, so it is kept.
self.prepare_storage()
self.str = self.load_file(fpath) if fpath else self.sanitize(str)
self.validate_config()
self.generate_kgrams()
self.hash_kgrams()
self.generate_fingerprints()
return self.fingerprints
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(pyc_file, asm_path): """ Create Python bytecode from a Python assembly file. ASM_PATH gives the input Python assembly file. We suggest ending the file in .pyasm If --pyc-file is given, that indicates the path to write the Python bytecode. The path should end in '.pyc'. See https://github.com/rocky/python-xasm/blob/master/HOW-TO-USE.rst for how to write a Python assembler file. """
# Body of the assembler CLI: refuse empty input, assemble, and derive the
# output .pyc name from the .pyasm path when not given explicitly.
if os.stat(asm_path).st_size == 0:
    print("Size of assembly file %s is zero" % asm_path)
    sys.exit(1)
asm = asm_file(asm_path)

if not pyc_file and asm_path.endswith('.pyasm'):
    # foo.pyasm -> foo.pyc
    pyc_file = asm_path[:-len('.pyasm')] + '.pyc'

write_pycfile(pyc_file, asm)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def expire(self, secs): """ Adds the standard 'exp' field, used to prevent replay attacks. Adds the 'exp' field to the payload. When a request is made, the field says that it should expire at now + `secs` seconds. Of course, this provides no protection unless the server reads and interprets this field. """
# Register a generator for the JWT-style 'exp' claim; evaluated lazily per
# request so the expiry is relative to the time of each request, not to now.
self.add_field('exp', lambda req: int(time.time() + secs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _generate(self, request): """ Generate a payload for the given request. """
# Build the payload dict: each registered generator is either a callable
# (invoked with the request) or a constant value; falsy values are omitted.
payload = {}
for field, gen in self._generators.items():
    value = None
    if callable(gen):
        value = gen(request)
    else:
        value = gen
    if value:
        payload[field] = value
return payload
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def url2fs(url): """ encode a URL to be safe as a filename """
# Base64-encode the URL minus its extension, keeping the extension readable.
uri, extension = posixpath.splitext(url)
return safe64.dir(uri) + extension
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_merc_projection(srs): """ Return true if the map projection matches that used by VEarth, Google, OSM, etc. Is currently necessary for zoom-level shorthand for scale-denominator. """
# Fast path: the epsg:900913 alias is spherical mercator by definition.
if srs.lower() == '+init=epsg:900913':
    return True

# observed: parse the given srs into key=value pairs
srs = dict([p.split('=') for p in srs.split() if '=' in p])

# expected
# note, common optional modifiers like +no_defs, +over, and +wkt
# are not pairs and should not prevent matching
gym = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null'
gym = dict([p.split('=') for p in gym.split() if '=' in p])

# Every expected parameter must match the observed srs.
for p in gym:
    if srs.get(p, None) != gym.get(p, None):
        return False

return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_declarations(map_el, dirs, scale=1, user_styles=[]): """ Given a Map element and directories object, remove and return a complete list of style declarations from any Stylesheet elements found within. """
styles = [] # # First, look at all the stylesheets defined in the map itself. # for stylesheet in map_el.findall('Stylesheet'): map_el.remove(stylesheet) content, mss_href = fetch_embedded_or_remote_src(stylesheet, dirs) if content: styles.append((content, mss_href)) # # Second, look through the user-supplied styles for override rules. # for stylesheet in user_styles: mss_href = urljoin(dirs.source.rstrip('/')+'/', stylesheet) content = urllib.urlopen(mss_href).read().decode(DEFAULT_ENCODING) styles.append((content, mss_href)) declarations = [] for (content, mss_href) in styles: is_merc = is_merc_projection(map_el.get('srs','')) for declaration in stylesheet_declarations(content, is_merc, scale): # # Change the value of each URI relative to the location # of the containing stylesheet. We generally just have # the one instance of "dirs" around for a full parse cycle, # so it's necessary to perform this normalization here # instead of later, while mss_href is still available. # uri_value = declaration.value.value if uri_value.__class__ is uri: uri_value.address = urljoin(mss_href, uri_value.address) declarations.append(declaration) return declarations
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_applicable_selector(selector, filter): """ Given a Selector and Filter, return True if the Selector is compatible with the given Filter, and False if they contradict. """
# A selector applies unless any one of its tests contradicts the filter.
for test in selector.allTests():
    if not test.isCompatible(filter.tests):
        return False

return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_polygon_rules(declarations): """ Given a Map element, a Layer element, and a list of declarations, create a new Style element with a PolygonSymbolizer, add it to Map and refer to it in Layer. """
# Map CSS-style polygon-* property names to PolygonSymbolizer attributes.
property_map = {'polygon-fill': 'fill', 'polygon-opacity': 'fill-opacity',
                'polygon-gamma': 'gamma',
                'polygon-meta-output': 'meta-output', 'polygon-meta-writer': 'meta-writer'}

property_names = property_map.keys()

# a place to put rules
rules = []

for (filter, values) in filtered_property_declarations(declarations, property_names):
    # fill color is required for a symbolizer; opacity and gamma are optional.
    color = values.has_key('polygon-fill') and values['polygon-fill'].value
    opacity = values.has_key('polygon-opacity') and values['polygon-opacity'].value or None
    gamma = values.has_key('polygon-gamma') and values['polygon-gamma'].value or None
    symbolizer = color and output.PolygonSymbolizer(color, opacity, gamma)

    if symbolizer:
        rules.append(make_rule(filter, symbolizer))

return rules
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_raster_rules(declarations): """ Given a Map element, a Layer element, and a list of declarations, create a new Style element with a RasterSymbolizer, add it to Map and refer to it in Layer. The RasterSymbolizer will always created, even if there are no applicable declarations. """
# Map CSS-style raster-* property names to RasterSymbolizer attributes.
property_map = {'raster-opacity': 'opacity',
                'raster-mode': 'mode',
                'raster-scaling': 'scaling'
                }

property_names = property_map.keys()

# a place to put rules
rules = []

for (filter, values) in filtered_property_declarations(declarations, property_names):
    sym_params = {}
    for prop,attr in property_map.items():
        sym_params[attr] = values.has_key(prop) and values[prop].value or None

    symbolizer = output.RasterSymbolizer(**sym_params)

    rules.append(make_rule(filter, symbolizer))

if not rules:
    # No raster-* rules were created, but we're here so we must need a symbolizer.
    rules.append(make_rule(Filter(), output.RasterSymbolizer()))

return rules
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def locally_cache_remote_file(href, dir): """ Locally cache a remote resource using a predictable file name and awareness of modification date. Assume that files are "normal" which is to say they have filenames with extensions. """
# Derive a stable local cache filename from the URL, then fetch with an
# If-Modified-Since conditional GET so unchanged resources are not re-downloaded.
scheme, host, remote_path, params, query, fragment = urlparse(href)

assert scheme in ('http','https'), 'Scheme must be either http or https, not "%s" (for %s)' % (scheme,href)

# Sanitize the basename and add a short hash of the full URL to avoid collisions.
head, ext = posixpath.splitext(posixpath.basename(remote_path))
head = sub(r'[^\w\-_]', '', head)
hash = md5(href).hexdigest()[:8]

local_path = '%(dir)s/%(host)s-%(hash)s-%(head)s%(ext)s' % locals()

headers = {}
if posixpath.exists(local_path):
    msg('Found local file: %s' % local_path )
    t = localtime(os.stat(local_path).st_mtime)
    headers['If-Modified-Since'] = strftime('%a, %d %b %Y %H:%M:%S %Z', t)

if scheme == 'https':
    conn = HTTPSConnection(host, timeout=5)
else:
    conn = HTTPConnection(host, timeout=5)

if query:
    remote_path += '?%s' % query

conn.request('GET', remote_path, headers=headers)
resp = conn.getresponse()

if resp.status in range(200, 210):
    # hurrah, it worked
    f = open(un_posix(local_path), 'wb')
    msg('Reading from remote: %s' % remote_path)
    f.write(resp.read())
    f.close()

elif resp.status in (301, 302, 303) and resp.getheader('location', False):
    # follow a redirect, totally untested.
    redirected_href = urljoin(href, resp.getheader('location'))
    redirected_path = locally_cache_remote_file(redirected_href, dir)
    os.rename(redirected_path, local_path)

elif resp.status == 304:
    # hurrah, it's cached
    msg('Reading directly from local cache')
    pass

else:
    raise Exception("Failed to get remote resource %s: %s" % (href, resp.status))

return local_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post_process_symbolizer_image_file(file_href, dirs): """ Given an image file href and a set of directories, modify the image file name so it's correct with respect to the output and cache directories. """
# support latest mapnik features of auto-detection # of image sizes and jpeg reading support... # http://trac.mapnik.org/ticket/508 mapnik_auto_image_support = (MAPNIK_VERSION >= 701) mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601) file_href = urljoin(dirs.source.rstrip('/')+'/', file_href) scheme, n, path, p, q, f = urlparse(file_href) if scheme in ('http','https'): scheme, path = '', locally_cache_remote_file(file_href, dirs.cache) if scheme not in ('file', '') or not systempath.exists(un_posix(path)): raise Exception("Image file needs to be a working, fetchable resource, not %s" % file_href) if not mapnik_auto_image_support and not Image: raise SystemExit('PIL (Python Imaging Library) is required for handling image data unless you are using PNG inputs and running Mapnik >=0.7.0') img = Image.open(un_posix(path)) if mapnik_requires_absolute_paths: path = posixpath.realpath(path) else: path = dirs.output_path(path) msg('reading symbol: %s' % path) image_name, ext = posixpath.splitext(path) if ext in ('.png', '.tif', '.tiff'): output_ext = ext else: output_ext = '.png' # new local file name dest_file = un_posix('%s%s' % (image_name, output_ext)) if not posixpath.exists(dest_file): img.save(dest_file,'PNG') msg('Destination file: %s' % dest_file) return dest_file, output_ext[1:], img.size[0], img.size[1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def localize_shapefile(shp_href, dirs): """ Given a shapefile href and a set of directories, modify the shapefile name so it's correct with respect to the output and cache directories. """
# support latest mapnik features of auto-detection # of image sizes and jpeg reading support... # http://trac.mapnik.org/ticket/508 mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601) shp_href = urljoin(dirs.source.rstrip('/')+'/', shp_href) scheme, host, path, p, q, f = urlparse(shp_href) if scheme in ('http','https'): msg('%s | %s' % (shp_href, dirs.cache)) scheme, path = '', locally_cache_remote_file(shp_href, dirs.cache) else: host = None # collect drive for windows to_posix(systempath.realpath(path)) if scheme not in ('file', ''): raise Exception("Shapefile needs to be local, not %s" % shp_href) if mapnik_requires_absolute_paths: path = posixpath.realpath(path) original = path path = dirs.output_path(path) if path.endswith('.zip'): # unzip_shapefile_into needs a path it can find path = posixpath.join(dirs.output, path) path = unzip_shapefile_into(path, dirs.cache, host) return dirs.output_path(path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def localize_file_datasource(file_href, dirs): """ Handle localizing file-based datasources other than shapefiles. This will only work for single-file based types. """
# support latest mapnik features of auto-detection # of image sizes and jpeg reading support... # http://trac.mapnik.org/ticket/508 mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601) file_href = urljoin(dirs.source.rstrip('/')+'/', file_href) scheme, n, path, p, q, f = urlparse(file_href) if scheme in ('http','https'): scheme, path = '', locally_cache_remote_file(file_href, dirs.cache) if scheme not in ('file', ''): raise Exception("Datasource file needs to be a working, fetchable resource, not %s" % file_href) if mapnik_requires_absolute_paths: return posixpath.realpath(path) else: return dirs.output_path(path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def midpoint(self): """ Return a point guaranteed to fall within this range, hopefully near the middle. """
# Tighten open endpoints by one (strict inequalities exclude the edge value),
# then average; a missing edge means the range is unbounded on that side.
minpoint = self.leftedge

if self.leftop is gt:
    minpoint += 1

maxpoint = self.rightedge

if self.rightop is lt:
    maxpoint -= 1

if minpoint is None:
    return maxpoint

elif maxpoint is None:
    return minpoint

else:
    return (minpoint + maxpoint) / 2
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def isOpen(self): """ Return true if this range has any room in it. """
# A range is closed (empty) when its edges cross, or when they coincide on a
# strict inequality.
# NOTE(review): the truthiness test skips the crossing check when an edge is 0
# rather than None -- presumably edges are scale denominators/zooms where 0
# does not occur; confirm before relying on 0-valued edges.
if self.leftedge and self.rightedge and self.leftedge > self.rightedge:
    return False

if self.leftedge == self.rightedge:
    if self.leftop is gt or self.rightop is lt:
        return False

return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def toFilter(self, property): """ Convert this range to a Filter with a tests having a given property. """
# Degenerate closed range [x, x] collapses to an equality test.
if self.leftedge == self.rightedge and self.leftop is ge and self.rightop is le:
    # equivalent to ==
    return Filter(style.SelectorAttributeTest(property, '=', self.leftedge))

# opstr raises KeyError for a missing (None) operator, so fall through from
# two-sided, to right-only, to left-only, to an empty (always-true) filter.
try:
    return Filter(style.SelectorAttributeTest(property, opstr[self.leftop], self.leftedge),
                  style.SelectorAttributeTest(property, opstr[self.rightop], self.rightedge))
except KeyError:
    try:
        return Filter(style.SelectorAttributeTest(property, opstr[self.rightop], self.rightedge))
    except KeyError:
        try:
            return Filter(style.SelectorAttributeTest(property, opstr[self.leftop], self.leftedge))
        except KeyError:
            return Filter()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def isOpen(self): """ Return true if this filter is not trivially false, i.e. self-contradictory. """
# Track, per property, the value it must equal and the set of values it must
# not equal; any contradiction makes the filter trivially false.
equals = {}
nequals = {}

for test in self.tests:
    if test.op == '=':
        if equals.has_key(test.property) and test.value != equals[test.property]:
            # we've already stated that this arg must equal something else
            return False

        if nequals.has_key(test.property) and test.value in nequals[test.property]:
            # we've already stated that this arg must not equal its current value
            return False

        equals[test.property] = test.value

    if test.op == '!=':
        if equals.has_key(test.property) and test.value == equals[test.property]:
            # we've already stated that this arg must equal its current value
            return False

        if not nequals.has_key(test.property):
            nequals[test.property] = set()

        nequals[test.property].add(test.value)

return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def minusExtras(self): """ Return a new Filter that's equal to this one, without extra terms that don't add meaning. """
# Drop '!=' tests already implied by an '=' test on the same property with a
# different value; pop indices back-to-front so earlier indices stay valid.
assert self.isOpen()

trimmed = self.clone()
equals = {}

for test in trimmed.tests:
    if test.op == '=':
        equals[test.property] = test.value

extras = []

for (i, test) in enumerate(trimmed.tests):
    if test.op == '!=' and equals.has_key(test.property) and equals[test.property] != test.value:
        extras.append(i)

while extras:
    trimmed.tests.pop(extras.pop())

return trimmed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_preference(hdf5_file, preference): """Assign the value 'preference' to the diagonal entries of the matrix of similarities stored in the HDF5 data structure at 'hdf5_file'. """
# Set the diagonal (self-similarity) of the similarity matrix to `preference`
# under the shared HDF5 lock.
Worker.hdf5_lock.acquire()

with tables.open_file(hdf5_file, 'r+') as fileh:
    S = fileh.root.aff_prop_group.similarities
    diag_ind = np.diag_indices(S.nrows)
    S[diag_ind] = preference

Worker.hdf5_lock.release()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_fluctuations(hdf5_file, N_columns, N_processes): """This procedure organizes the addition of small fluctuations on top of a matrix of similarities at 'hdf5_file' across 'N_processes' different processes. Each of those processes is an instance of the class 'Fluctuations_Worker' defined elsewhere in this module. """
# Fan row-slices of the similarity matrix out to daemon worker processes via a
# joinable queue; a fixed seed keeps the added noise reproducible.
random_state = np.random.RandomState(0)
slice_queue = multiprocessing.JoinableQueue()

pid_list = []
for i in range(N_processes):
    worker = Fluctuations_worker(hdf5_file,
                                 '/aff_prop_group/similarities', random_state,
                                 N_columns, slice_queue)
    worker.daemon = True
    worker.start()
    pid_list.append(worker.pid)

for rows_slice in chunk_generator(N_columns, 4 * N_processes):
    slice_queue.put(rows_slice)

# Block until every queued slice has been processed, then tear down.
slice_queue.join()
slice_queue.close()

terminate_processes(pid_list)
gc.collect()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compute_responsibilities(hdf5_file, N_columns, damping, N_processes): """Organize the computation and update of the responsibility matrix for Affinity Propagation clustering with 'damping' as the eponymous damping parameter. Each of the processes concurrently involved in this task is an instance of the class 'Responsibilities_worker' defined above. """
# Same fan-out pattern as add_fluctuations: daemon workers consume row slices
# of the responsibility update from a joinable queue.
slice_queue = multiprocessing.JoinableQueue()

pid_list = []
for i in range(N_processes):
    worker = Responsibilities_worker(hdf5_file, '/aff_prop_group',
                                     N_columns, damping, slice_queue)
    worker.daemon = True
    worker.start()
    pid_list.append(worker.pid)

for rows_slice in chunk_generator(N_columns, 8 * N_processes):
    slice_queue.put(rows_slice)

slice_queue.join()
slice_queue.close()

terminate_processes(pid_list)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_numpy_array(multiprocessing_array, shape, dtype): """Convert a shared multiprocessing array to a numpy array. No data copying involved. """
# Wrap the shared buffer without copying; the returned array aliases the
# multiprocessing.Array's memory.
return np.frombuffer(multiprocessing_array.get_obj(),
                     dtype = dtype).reshape(shape)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compute_rows_sum(hdf5_file, path, N_columns, N_processes, method = 'Process'): """Parallel computation of the sums across the rows of two-dimensional array accessible at the node specified by 'path' in the 'hdf5_file' hierarchical data format. """
# NOTE(review): the assert only admits 'Ordinary' or 'Pool', yet the documented
# default for `method` is 'Process' -- one of the two looks stale; confirm.
assert isinstance(method, str), "parameter 'method' must consist in a string of characters"
assert method in ('Ordinary', 'Pool'), "parameter 'method' must be set to either of 'Ordinary' or 'Pool'"

if method == 'Ordinary':
    # Serial path: accumulate column-chunk sums directly from the HDF5 array.
    rows_sum = np.zeros(N_columns, dtype = float)

    chunk_size = get_chunk_size(N_columns, 2)

    with Worker.hdf5_lock:
        with tables.open_file(hdf5_file, 'r+') as fileh:
            hdf5_array = fileh.get_node(path)

            N_rows = hdf5_array.nrows

            assert N_columns == N_rows

            for i in range(0, N_columns, chunk_size):
                slc = slice(i, min(i+chunk_size, N_columns))
                tmp = hdf5_array[:, slc]
                rows_sum[slc] = tmp[:].sum(axis = 0)

else:
    # Parallel path: workers accumulate into a lock-protected shared array.
    rows_sum_array = multiprocessing.Array(c_double, N_columns, lock = True)

    chunk_size = get_chunk_size(N_columns, 2 * N_processes)
    numpy_args = rows_sum_array, N_columns, np.float64

    with closing(multiprocessing.Pool(N_processes,
                 initializer = rows_sum_init,
                 initargs = (hdf5_file, path, rows_sum_array.get_lock()) + numpy_args)) as pool:
        pool.map_async(multiprocessing_get_sum,
                       chunk_generator(N_columns, 2 * N_processes), chunk_size)

    pool.close()
    pool.join()

    rows_sum = to_numpy_array(*numpy_args)

gc.collect()

return rows_sum
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_convergence(hdf5_file, iteration, convergence_iter, max_iter): """If the estimated number of clusters has not changed for 'convergence_iter' consecutive iterations in a total of 'max_iter' rounds of message-passing, the procedure herewith returns 'True'. Otherwise, returns 'False'. Parameter 'iteration' identifies the run of message-passing that has just completed. """
# An exemplar is a point whose availability + responsibility diagonal is
# positive; record the exemplar indicator vector for this iteration and test
# whether it has been stable for `convergence_iter` rounds.
Worker.hdf5_lock.acquire()

with tables.open_file(hdf5_file, 'r+') as fileh:
    A = fileh.root.aff_prop_group.availabilities
    R = fileh.root.aff_prop_group.responsibilities
    P = fileh.root.aff_prop_group.parallel_updates

    N = A.nrows
    diag_ind = np.diag_indices(N)

    E = (A[diag_ind] + R[diag_ind]) > 0
    # Circular buffer of the last `convergence_iter` indicator vectors.
    P[:, iteration % convergence_iter] = E

    e_mat = P[:]
    K = E.sum(axis = 0)

Worker.hdf5_lock.release()

if iteration >= convergence_iter:
    se = e_mat.sum(axis = 1)
    # Converged when every point was an exemplar in all or none of the
    # last `convergence_iter` iterations.
    unconverged = (np.sum((se == convergence_iter) + (se == 0)) != N)

    if (not unconverged and (K > 0)) or (iteration == max_iter):
        return True

return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cluster_labels_A(hdf5_file, c, lock, I, rows_slice): """One of the task to be performed by a pool of subprocesses, as the first step in identifying the cluster labels and indices of the cluster centers for Affinity Propagation clustering. """
# For each row in the slice, label it with the index (into I, the candidate
# exemplars) of its most similar exemplar; write results under the shared lock.
with Worker.hdf5_lock:
    with tables.open_file(hdf5_file, 'r+') as fileh:
        S = fileh.root.aff_prop_group.similarities
        s = S[rows_slice, ...]

s = np.argmax(s[:, I], axis = 1)

with lock:
    c[rows_slice] = s[:]

del s
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cluster_labels_B(hdf5_file, s_reduced, lock, I, ii, iix, rows_slice): """Second task to be performed by a pool of subprocesses before the cluster labels and cluster center indices can be identified. """
# Accumulate, into the shared s_reduced buffer, the similarities of the slice's
# selected members (iix) to the candidate exemplars (ii).
with Worker.hdf5_lock:
    with tables.open_file(hdf5_file, 'r+') as fileh:
        S = fileh.root.aff_prop_group.similarities
        s = S[rows_slice, ...]

s = s[:, ii]
s = s[iix[rows_slice]]

with lock:
    s_reduced += s[:].sum(axis = 0)

del s
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def output_clusters(labels, cluster_centers_indices): """Write in tab-separated files the vectors of cluster identities and of indices of cluster centers. """
# Write labels (and optionally cluster-center indices) as TSV files under a
# 'concurrent_AP_output' directory in the current working directory.
here = os.getcwd()

try:
    output_directory = os.path.join(here, 'concurrent_AP_output')
    os.makedirs(output_directory)
except OSError:
    # makedirs raises OSError if the directory already exists; only bail
    # out when it is genuinely missing.
    if not os.path.isdir(output_directory):
        print("ERROR: concurrent_AP: output_clusters: cannot create a directory "
              "for storage of the results of Affinity Propagation clustering "
              "in your current working directory")
        sys.exit(1)

if any(np.isnan(labels)):
    fmt = '%.1f'
else:
    fmt = '%d'

with open(os.path.join(output_directory, 'labels.tsv'), 'w') as fh:
    np.savetxt(fh, labels, fmt = fmt, delimiter = '\t')

if cluster_centers_indices is not None:
    with open(os.path.join(output_directory, 'cluster_centers_indices.tsv'), 'w') as fh:
        np.savetxt(fh, cluster_centers_indices, fmt = '%.1f', delimiter = '\t')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_coin_snapshot(fsym, tsym): """ Get blockchain information, aggregated data as well as data for the individual exchanges available for the specified currency pair. Args: fsym: FROM symbol. tsym: TO symbol. Returns: The function returns a dictionary containing blockchain as well as trading information from the different exchanges were the specified currency pair is available. {'AggregatedData': dict, """
# load data url = build_url('coinsnapshot', fsym=fsym, tsym=tsym) data = load_data(url)['Data'] return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def matches(self, tag, id, classes): """ Given an id and a list of classes, return True if this selector would match. """
element = self.elements[0] unmatched_ids = [name[1:] for name in element.names if name.startswith('#')] unmatched_classes = [name[1:] for name in element.names if name.startswith('.')] unmatched_tags = [name for name in element.names if name is not '*' and not name.startswith('#') and not name.startswith('.')] if tag and tag in unmatched_tags: unmatched_tags.remove(tag) if id and id in unmatched_ids: unmatched_ids.remove(id) for class_ in classes: if class_ in unmatched_classes: unmatched_classes.remove(class_) if unmatched_tags or unmatched_ids or unmatched_classes: return False else: return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scaledBy(self, scale): """ Return a new Selector with scale denominators scaled by a number. """
# Work on a deep copy so the original selector is untouched. Scale-denominator
# tests scale linearly; zoom tests shift by log2(scale) since each zoom level
# halves the denominator.
scaled = deepcopy(self)

for test in scaled.elements[0].tests:
    if type(test.value) in (int, float):
        if test.property == 'scale-denominator':
            test.value /= scale
        elif test.property == 'zoom':
            test.value += log(scale)/log(2)

return scaled
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scaledBy(self, scale): """ Return a new Value scaled by a given number for ints and floats. """
# Scale a copied Value: plain ints/floats multiply directly; a `numbers`
# wrapper has each of its component values scaled.
scaled = deepcopy(self)

if type(scaled.value) in (int, float):
    scaled.value *= scale
elif isinstance(scaled.value, numbers):
    scaled.value.values = tuple(v * scale for v in scaled.value.values)

return scaled
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_mining_contracts(): """ Get all the mining contracts information available. Returns: This function returns two major dictionaries. The first one contains information about the coins for which mining contracts data is available: coin_data: The other one contains all the available mining contracts: mining_data: 'Id': id1, """
# load data url = build_url('miningcontracts') data = load_data(url) coin_data = data['CoinData'] mining_data = data['MiningData'] return coin_data, mining_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_mining_equipment(): """Get all the mining equipment information available. Returns: This function returns two major dictionaries. The first one contains information about the coins for which mining equipment data is available. coin_data: The other one contains all the available mining equipment. mining_data: """
# load data url = build_url('miningequipment') data = load_data(url) coin_data = data['CoinData'] mining_data = data['MiningData'] return coin_data, mining_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(src_file, dest_file, **kwargs): """ Given an input layers file and a directory, print the compiled XML file to stdout and save any encountered external image files to the named directory. """
mmap = mapnik.Map(1, 1) # allow [zoom] filters to work mmap.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null' load_kwargs = dict([(k, v) for (k, v) in kwargs.items() if k in ('cache_dir', 'scale', 'verbose', 'datasources_cfg', 'user_styles')]) cascadenik.load_map(mmap, src_file, dirname(realpath(dest_file)), **load_kwargs) (handle, tmp_file) = tempfile.mkstemp(suffix='.xml', prefix='cascadenik-mapnik-') os.close(handle) mapnik.save_map(mmap, tmp_file) if kwargs.get('pretty'): doc = ElementTree.fromstring(open(tmp_file, 'rb').read()) cascadenik._compile.indent(doc) f = open(tmp_file, 'wb') ElementTree.ElementTree(doc).write(f) f.close() # manually unlinking seems to be required on windows if os.path.exists(dest_file): os.unlink(dest_file) os.chmod(tmp_file, 0666^os.umask(0)) shutil.move(tmp_file, dest_file) return 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def chunk(url): """ create filesystem-safe places for url-keyed data to be stored """
# URL-safe base64 the key, then split it into 255-character pieces so each
# piece fits common filesystem name-length limits.
chunks = lambda l, n: [l[x: x+n] for x in xrange(0, len(l), n)]
url_64 = base64.urlsafe_b64encode(url)
return chunks(url_64, 255)
def main(filename):
    """ Given an input file containing nothing but styles, print out an
        unrolled list of declarations in cascade order.

        Output uses CSS-like syntax: "selector { property: value; }".
        (Python 2 code: uses print statements with trailing commas to stay on
        one line per declaration.)
    """
    input = open(filename, 'r').read()
    declarations = cascadenik.stylesheet_declarations(input, is_merc=True)

    for dec in declarations:
        print dec.selector,
        print '{',
        print dec.property.name+':',

        # Format the value according to the declared property type:
        # bare literal, quoted url(), quoted string, or plain number/tuple.
        if cascadenik.style.properties[dec.property.name] in (cascadenik.style.color, cascadenik.style.boolean, cascadenik.style.numbers):
            print str(dec.value.value)+';',

        elif cascadenik.style.properties[dec.property.name] is cascadenik.style.uri:
            print 'url("'+str(dec.value.value)+'");',

        elif cascadenik.style.properties[dec.property.name] is str:
            print '"'+str(dec.value.value)+'";',

        elif cascadenik.style.properties[dec.property.name] in (int, float) or type(cascadenik.style.properties[dec.property.name]) is tuple:
            print str(dec.value.value)+';',

        print '}'

    return 0
def validate_gps(value):
    """Validate GPS value.

    The value must be a comma-separated string of exactly three
    float-coercible parts; raises vol.Invalid otherwise.
    """
    try:
        coords = value.split(',')
        # Mirror tuple-unpacking semantics: anything but three parts fails.
        if len(coords) != 3:
            raise ValueError
        for coord in coords:
            vol.Coerce(float)(coord)
    except (TypeError, ValueError, vol.Invalid):
        raise vol.Invalid(
            'GPS value should be of format "latitude,longitude,altitude"')
    return value
def _connect(self):
    """Connect to socket. This should be run in a new thread.

    Retries indefinitely with self.reconnect_timeout between attempts;
    the loop ends when self.protocol is cleared elsewhere or when a
    connection succeeds and the transport has been started.
    """
    while self.protocol:
        _LOGGER.info('Trying to connect to %s', self.server_address)
        try:
            # reconnect_timeout doubles as the connect timeout here.
            sock = socket.create_connection(
                self.server_address, self.reconnect_timeout)
        except socket.timeout:
            _LOGGER.error(
                'Connecting to socket timed out for %s', self.server_address)
            _LOGGER.info(
                'Waiting %s secs before trying to connect again',
                self.reconnect_timeout)
            time.sleep(self.reconnect_timeout)
        except OSError:
            _LOGGER.error(
                'Failed to connect to socket at %s', self.server_address)
            _LOGGER.info(
                'Waiting %s secs before trying to connect again',
                self.reconnect_timeout)
            time.sleep(self.reconnect_timeout)
        else:
            # Connected: reset the keep-alive timers, start the queue
            # polling thread, and hand the socket to the transport thread.
            self.tcp_check_timer = time.time()
            self.tcp_disconnect_timer = time.time()
            transport = TCPTransport(
                sock, lambda: self.protocol, self._check_connection)
            poll_thread = threading.Thread(target=self._poll_queue)
            self._stop_event.clear()
            poll_thread.start()
            transport.start()
            transport.connect()
            return
def _connect(self):
    """Connect to the socket.

    Coroutine (pre-async/await style).  Retries forever with
    self.reconnect_timeout between attempts; returns once connected,
    or logs and exits quietly if the task is cancelled.
    """
    try:
        while True:
            _LOGGER.info('Trying to connect to %s', self.server_address)
            try:
                # Bound the connection attempt with the reconnect timeout.
                yield from asyncio.wait_for(
                    self.loop.create_connection(
                        lambda: self.protocol, *self.server_address),
                    self.reconnect_timeout, loop=self.loop)
                # Connected: reset keep-alive timers and start the watchdog.
                self.tcp_check_timer = time.time()
                self.tcp_disconnect_timer = time.time()
                self._check_connection()
                return
            except asyncio.TimeoutError:
                _LOGGER.error(
                    'Connecting to socket timed out for %s',
                    self.server_address)
                _LOGGER.info(
                    'Waiting %s secs before trying to connect again',
                    self.reconnect_timeout)
                yield from asyncio.sleep(
                    self.reconnect_timeout, loop=self.loop)
            except OSError:
                _LOGGER.error(
                    'Failed to connect to socket at %s', self.server_address)
                _LOGGER.info(
                    'Waiting %s secs before trying to connect again',
                    self.reconnect_timeout)
                yield from asyncio.sleep(
                    self.reconnect_timeout, loop=self.loop)
    except asyncio.CancelledError:
        # Normal shutdown path: the reconnect task was cancelled.
        _LOGGER.debug(
            'Connect attempt to %s cancelled', self.server_address)
def run(self):
    """Transport thread loop.

    Builds the protocol, announces the connection, then polls the socket
    until self.alive is cleared or an error occurs.  On exit the protocol
    is told the connection was lost (with the error, or None for a clean
    stop) and the protocol reference is dropped.
    """
    # pylint: disable=broad-except
    self.protocol = self.protocol_factory()
    try:
        self.protocol.connection_made(self)
    except Exception as exc:
        # connection_made failed: report, signal waiters, and bail out.
        self.alive = False
        self.protocol.connection_lost(exc)
        self._connection_made.set()
        return
    error = None
    # Unblock anyone waiting for the connection to be established.
    self._connection_made.set()
    while self.alive:
        data = None
        try:
            # Only read when select/poll says the socket is readable.
            available_socks = self._check_socket()
            if available_socks[0]:
                data = self.sock.recv(120)
        except Exception as exc:
            error = exc
            break
        else:
            if data:
                try:
                    self.protocol.data_received(data)
                except Exception as exc:
                    error = exc
                    break
        try:
            # Keep-alive / disconnect watchdog.
            self._check_connection()
        except OSError as exc:
            error = exc
            break
        time.sleep(0.02)  # short sleep to avoid burning 100% cpu
    self.alive = False
    self.protocol.connection_lost(error)
    self.protocol = None
def register(self, name):
    """Return decorator to register item with a specific name.

    The decorated callable is stored under ``name`` in this mapping and
    returned unchanged, so the decorator is transparent to the caller.
    """
    def wrapper(handler):
        """Register decorated function."""
        self[name] = handler
        return handler

    return wrapper
def _handle_subscription(self, topics):
    """Handle subscription of topics.

    Accepts one topic or a list of topics; the QoS level is read from the
    second-to-last topic level, defaulting to 0 when not numeric.
    Subscription failures are logged, never raised.
    """
    topic_list = topics if isinstance(topics, list) else [topics]
    for topic in topic_list:
        levels = topic.split('/')
        try:
            qos = int(levels[-2])
        except ValueError:
            qos = 0
        try:
            _LOGGER.debug('Subscribing to: %s, qos: %s', topic, qos)
            self._sub_callback(topic, self.recv, qos)
        except Exception as exc:  # pylint: disable=broad-except
            _LOGGER.exception(
                'Subscribe to %s failed: %s', topic, exc)
def _init_topics(self):
    """Set up initial subscription of mysensors topics.

    Always subscribes to presentation (0) and internal (3) messages for
    all nodes; when persistence is enabled, also re-subscribes to the
    set/req and stream topics of every known sensor child.
    """
    _LOGGER.info('Setting up initial MQTT topic subscription')
    # Wildcard node/child for presentation (type 0) and internal (type 3).
    init_topics = [
        '{}/+/+/0/+/+'.format(self._in_prefix),
        '{}/+/+/3/+/+'.format(self._in_prefix),
    ]
    self._handle_subscription(init_topics)
    if not self.persistence:
        # Without persistence there are no known sensors to re-subscribe.
        return
    # set/req topics for every persisted sensor child.
    topics = [
        '{}/{}/{}/{}/+/+'.format(
            self._in_prefix, str(sensor.sensor_id), str(child.id), msg_type)
        for sensor in self.sensors.values()
        for child in sensor.children.values()
        for msg_type in (int(self.const.MessageType.set),
                         int(self.const.MessageType.req))
    ]
    # Stream topics use a wildcard child id per sensor.
    topics.extend([
        '{}/{}/+/{}/+/+'.format(
            self._in_prefix, str(sensor.sensor_id),
            int(self.const.MessageType.stream))
        for sensor in self.sensors.values()])
    self._handle_subscription(topics)
def _parse_mqtt_to_message(self, topic, payload, qos):
    """Parse a MQTT topic and payload.

    Return a mysensors command string, or None when the topic prefix
    does not match this gateway's in-prefix.
    """
    levels = topic.split('/')
    # The last five levels are node/child/type/ack/subtype; everything
    # before them (minus the joining '/') is the prefix.
    levels = tail = levels[-5:]
    prefix = topic[:topic.find('/'.join(tail)) - 1]
    if prefix != self._in_prefix:
        return None
    # A positive QoS maps to the ack flag in the command string.
    levels[3] = '1' if qos and qos > 0 else '0'
    levels.append(str(payload))
    return ';'.join(levels)
def _parse_message_to_mqtt(self, data):
    """Parse a mysensors command string.

    Return a MQTT topic, payload and qos-level as a tuple.
    """
    msg = Message(data, self)
    payload = str(msg.payload)
    # Blank the payload before encoding so it is not embedded in the topic.
    msg.payload = ''
    # prefix/node/child/type/ack/subtype : payload
    topic = '{}/{}'.format(self._out_prefix, msg.encode('/'))[:-2]
    return topic, payload, msg.ack
def _handle_presentation(self, msg):
    """Process a MQTT presentation message.

    When a child sensor presents itself, subscribe to its set/req and
    stream topics so future messages for it are received.
    """
    ret_msg = handle_presentation(msg)
    if msg.child_id == 255 or ret_msg is None:
        return
    # this is a presentation of a child sensor
    topics = []
    for msg_type in (int(self.const.MessageType.set),
                     int(self.const.MessageType.req)):
        topics.append('{}/{}/{}/{}/+/+'.format(
            self._in_prefix, str(msg.node_id), str(msg.child_id), msg_type))
    topics.append('{}/{}/+/{}/+/+'.format(
        self._in_prefix, str(msg.node_id),
        int(self.const.MessageType.stream)))
    self._handle_subscription(topics)
def recv(self, topic, payload, qos):
    """Receive a MQTT message.

    Call this method when a message is received from the MQTT broker.
    Messages whose topic prefix does not match are ignored.
    """
    data = self._parse_mqtt_to_message(topic, payload, qos)
    if data is not None:
        _LOGGER.debug('Receiving %s', data)
        self.add_job(self.logic, data)
def send(self, message):
    """Publish a command string to the gateway via MQTT.

    Empty messages are ignored; publish failures are logged, not raised.
    """
    if not message:
        return
    topic, payload, qos = self._parse_message_to_mqtt(message)
    try:
        _LOGGER.debug('Publishing %s', message.strip())
        self._pub_callback(topic, payload, qos, self._retain)
    except Exception as exc:  # pylint: disable=broad-except
        _LOGGER.exception('Publish to %s failed: %s', topic, exc)
def contribute_to_class(self, cls, name, virtual_only=False):
    """Attach the field to the model class.

    After the normal Django field setup, install a descriptor on the
    model attribute so every assignment is cast to the correct value.
    """
    super(RegexField, self).contribute_to_class(cls, name, virtual_only)
    # Replace the plain attribute with a cast-on-assignment descriptor.
    setattr(cls, name, CastOnAssignDescriptor(self))
def run_validators(self, value):
    """ Make sure value is a string so it can run through django validators """
    # Normalize to the field's python value, then back to its string form,
    # since the stock validators expect string input.
    value = self.to_python(value)
    value = self.value_to_string(value)
    return super(RegexField, self).run_validators(value)
def validate_hex(value):
    """Validate that value has hex format.

    Return value unchanged when it decodes as hexadecimal; raise
    vol.Invalid otherwise.
    """
    try:
        binascii.unhexlify(value)
    # Narrowed from a blanket `except Exception`: unhexlify signals bad
    # input via binascii.Error (a ValueError) or TypeError for non-string
    # values, so only those should be mapped to a validation error.
    except (binascii.Error, TypeError):
        raise vol.Invalid(
            '{} is not of hex format'.format(value))
    return value
def validate_v_rgb(value):
    """Validate a V_RGB value.

    Must be exactly six hex characters (RRGGBB).
    """
    if len(value) == 6:
        return validate_hex(value)
    raise vol.Invalid(
        '{} is not six characters long'.format(value))
def validate_v_rgbw(value):
    """Validate a V_RGBW value.

    Must be exactly eight hex characters (RRGGBBWW).
    """
    if len(value) == 8:
        return validate_hex(value)
    raise vol.Invalid(
        '{} is not eight characters long'.format(value))
def copy(self, **kwargs):
    """Copy a message, optionally replace attributes with kwargs.

    The copy is built by re-decoding this message's encoded form, then
    overriding any attributes given as keyword arguments.
    """
    duplicate = Message(self.encode(), self.gateway)
    for attr, value in kwargs.items():
        setattr(duplicate, attr, value)
    return duplicate
def modify(self, **kwargs):
    """Modify and return message, replace attributes with kwargs."""
    for attr in kwargs:
        setattr(self, attr, kwargs[attr])
    return self
def decode(self, data, delimiter=';'):
    """Decode a message from command string.

    Populates node_id, child_id, type, ack, sub_type and payload from a
    "n;c;t;a;s;payload" string; logs and re-raises on malformed input.
    """
    try:
        fields = data.rstrip().split(delimiter)
        # Payload is the trailing field and stays a string.
        self.payload = fields.pop()
        (self.node_id,
         self.child_id,
         self.type,
         self.ack,
         self.sub_type) = [int(field) for field in fields]
    except ValueError:
        _LOGGER.warning('Error decoding message from gateway, '
                        'bad data received: %s', data.rstrip())
        raise
def encode(self, delimiter=';'):
    """Encode a command string from message.

    Returns "n;c;t;a;s;payload\\n"; on a bad type/sub_type value the
    error is logged and None is returned.
    """
    try:
        fields = [
            self.node_id,
            self.child_id,
            int(self.type),
            self.ack,
            int(self.sub_type),
            self.payload,
        ]
        return delimiter.join(str(field) for field in fields) + '\n'
    except ValueError:
        _LOGGER.error('Error encoding message to gateway')
def validate(self, protocol_version):
    """Validate message.

    Builds a voluptuous schema for the given protocol version and runs
    this message's fields through it, returning the validated dict or
    raising vol.Invalid/vol.MultipleInvalid on failure.
    """
    const = get_const(protocol_version)
    valid_node_ids = vol.All(vol.Coerce(int), vol.Range(
        min=0, max=BROADCAST_ID, msg='Not valid node_id: {}'.format(
            self.node_id)))
    valid_child_ids = vol.All(vol.Coerce(int), vol.Range(
        min=0, max=SYSTEM_CHILD_ID, msg='Not valid child_id: {}'.format(
            self.child_id)))
    # Internal and stream messages must target the system child id...
    if self.type in (const.MessageType.internal, const.MessageType.stream):
        valid_child_ids = vol.All(vol.Coerce(int), vol.In(
            [SYSTEM_CHILD_ID],
            msg='When message type is {}, child_id must be {}'.format(
                self.type, SYSTEM_CHILD_ID)))
    # ...except id request/response, where any int child id is accepted.
    if (self.type == const.MessageType.internal and
            self.sub_type in [
                const.Internal.I_ID_REQUEST, const.Internal.I_ID_RESPONSE]):
        valid_child_ids = vol.Coerce(int)
    valid_types = vol.All(vol.Coerce(int), vol.In(
        [member.value for member in const.VALID_MESSAGE_TYPES],
        msg='Not valid message type: {}'.format(self.type)))
    # The system child only accepts presentation/internal/stream messages.
    if self.child_id == SYSTEM_CHILD_ID:
        valid_types = vol.All(vol.Coerce(int), vol.In(
            [const.MessageType.presentation.value,
             const.MessageType.internal.value,
             const.MessageType.stream.value],
            msg=(
                'When child_id is {}, {} is not a valid '
                'message type'.format(SYSTEM_CHILD_ID, self.type))))
    valid_ack = vol.In([0, 1], msg='Not valid ack flag: {}'.format(
        self.ack))
    # Allowed sub-types and payload validators depend on the message type.
    valid_sub_types = vol.In(
        [member.value for member
         in const.VALID_MESSAGE_TYPES.get(self.type, [])],
        msg='Not valid message sub-type: {}'.format(self.sub_type))
    valid_payload = const.VALID_PAYLOADS.get(
        self.type, {}).get(self.sub_type, '')
    schema = vol.Schema({
        'node_id': valid_node_ids, 'child_id': valid_child_ids,
        'type': valid_types, 'ack': valid_ack,
        'sub_type': valid_sub_types, 'payload': valid_payload})
    to_validate = {attr: getattr(self, attr) for attr in schema.schema}
    return schema(to_validate)
def _save_pickle(self, filename):
    """Save sensors to pickle file.

    Flushes and fsyncs so the data is on disk before returning.
    """
    with open(filename, 'wb') as fobj:
        pickle.dump(self._sensors, fobj, pickle.HIGHEST_PROTOCOL)
        # Force the bytes through the OS buffers to stable storage.
        fobj.flush()
        os.fsync(fobj.fileno())
def _load_pickle(self, filename):
    """Load sensors from pickle file.

    Merges the persisted sensors into the current sensor dict.
    """
    with open(filename, 'rb') as fobj:
        persisted = pickle.load(fobj)
        self._sensors.update(persisted)
def _save_json(self, filename):
    """Save sensors to json file.

    Uses the project's JSON encoder; flushes and fsyncs before returning.
    """
    with open(filename, 'w') as fobj:
        json.dump(self._sensors, fobj, cls=MySensorsJSONEncoder, indent=4)
        # Force the bytes through the OS buffers to stable storage.
        fobj.flush()
        os.fsync(fobj.fileno())
def _load_json(self, filename):
    """Load sensors from json file.

    Merges the persisted sensors into the current sensor dict.
    """
    with open(filename, 'r') as fobj:
        persisted = json.load(fobj, cls=MySensorsJSONDecoder)
        self._sensors.update(persisted)
def save_sensors(self):
    """Save sensors to file.

    No-op unless self.need_save is set.  Writes atomically: serialize to
    a temp file, move the old file to a .bak, move the temp file into
    place, then remove the .bak.  Clears need_save on success.
    """
    if not self.need_save:
        return
    fname = os.path.realpath(self.persistence_file)
    exists = os.path.isfile(fname)
    dirname = os.path.dirname(fname)
    # Bail out early if we cannot write the directory or the existing file.
    if (not os.access(dirname, os.W_OK) or
            exists and not os.access(fname, os.W_OK)):
        _LOGGER.error('Permission denied when writing to %s', fname)
        return
    split_fname = os.path.splitext(fname)
    tmp_fname = '{}.tmp{}'.format(split_fname[0], split_fname[1])
    _LOGGER.debug('Saving sensors to persistence file %s', fname)
    # Dispatches to the format-specific save (_save_json/_save_pickle).
    self._perform_file_action(tmp_fname, 'save')
    # Keep the previous file as a backup until the new one is in place.
    if exists:
        os.rename(fname, self.persistence_bak)
    os.rename(tmp_fname, fname)
    if exists:
        os.remove(self.persistence_bak)
    self.need_save = False