text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _resource_index(self, resource): """Get index for given resource. by default it will be `self.index`, but it can be overriden via app.config :param resource: resource name """
datasource = self.get_datasource(resource) indexes = self._resource_config(resource, 'INDEXES') or {} default_index = self._resource_config(resource, 'INDEX') return indexes.get(datasource[0], default_index)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _refresh_resource_index(self, resource): """Refresh index for given resource. :param resource: resource name """
if self._resource_config(resource, 'FORCE_REFRESH', True): self.elastic(resource).indices.refresh(self._resource_index(resource))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _resource_prefix(self, resource=None): """Get elastic prefix for given resource. Resource can specify ``elastic_prefix`` which behaves same like ``mongo_prefix``. """
px = 'ELASTICSEARCH' if resource and config.DOMAIN[resource].get('elastic_prefix'): px = config.DOMAIN[resource].get('elastic_prefix') return px
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def elastic(self, resource=None): """Get ElasticSearch instance for given resource."""
def elastic(self, resource=None):
    """Return the ElasticSearch client for *resource*.

    Clients are created lazily, one per config prefix, and cached in
    ``self.elastics``.
    """
    prefix = self._resource_prefix(resource)
    if prefix not in self.elastics:
        url = self._resource_config(resource, 'URL')
        assert url, 'no url for %s' % prefix
        self.elastics[prefix] = get_es(url, **self.kwargs)
    return self.elastics[prefix]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_md5sum(fname, chunk_size=1024): """ Returns the MD5 checksum of a file. Args: fname (str): Filename chunk_size (Optional[int]): Size (in Bytes) of the chunks that should be read in at once. Increasing chunk size reduces the number of reads required, but increases the memory usage. Defaults to 1024. Returns: The MD5 checksum of the file, which is a string. """
def get_md5sum(fname, chunk_size=1024):
    """Return the MD5 checksum of a file.

    Args:
        fname (str): Filename.
        chunk_size (Optional[int]): Size (in bytes) of the chunks that
            are read in at once. Increasing the chunk size reduces the
            number of reads required but increases memory usage.
            Defaults to 1024.

    Returns:
        str: The hexadecimal MD5 digest of the file contents.
    """
    sig = hashlib.md5()
    with open(fname, 'rb') as f:
        # iter() with a sentinel yields fixed-size chunks until EOF,
        # replacing the previous hand-rolled generator (and the dead
        # commented-out whole-file read) while keeping memory bounded.
        for chunk in iter(lambda: f.read(chunk_size), b''):
            sig.update(chunk)
    return sig.hexdigest()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def download_and_verify(url, md5sum, fname=None, chunk_size=1024, clobber=False, verbose=True): """ Download a file and verify the MD5 sum. Args: url (str): The URL to download. md5sum (str): The expected MD5 sum. fname (Optional[str]): The filename to store the downloaded file in. If `None`, infer the filename from the URL. Defaults to `None`. chunk_size (Optional[int]): Process in chunks of this size (in Bytes). Defaults to 1024. clobber (Optional[bool]): If `True`, any existing, identical file will be overwritten. If `False`, the MD5 sum of any existing file with the destination filename will be checked. If the MD5 sum does not match, the existing file will be overwritten. Defaults to `False`. verbose (Optional[bool]): If `True` (the default), then a progress bar will be shownd during downloads. Returns: The filename the URL was downloaded to. Raises: DownloadError: The MD5 sum of the downloaded file does not match `md5sum`. requests.exceptions.HTTPError: There was a problem connecting to the URL. """
def download_and_verify(url, md5sum, fname=None, chunk_size=1024, clobber=False,
                        verbose=True):
    """Download a file and verify its MD5 sum.

    Args:
        url (str): The URL to download.
        md5sum (str): The expected MD5 sum.
        fname (Optional[str]): The filename to store the downloaded file in.
            If `None`, infer the filename from the URL. Defaults to `None`.
        chunk_size (Optional[int]): Process in chunks of this size (in
            bytes). Defaults to 1024.
        clobber (Optional[bool]): If `True`, any existing, identical file
            will be overwritten. If `False`, the MD5 sum of any existing
            file with the destination filename will be checked, and the file
            only overwritten if the sum does not match. Defaults to `False`.
        verbose (Optional[bool]): If `True` (the default), show a progress
            bar during downloads.

    Returns:
        str: The filename the URL was downloaded to.

    Raises:
        DownloadError: The MD5 sum of the downloaded file does not match
            `md5sum`.
        requests.exceptions.HTTPError: There was a problem connecting to
            the URL.
    """
    # Determine the filename
    if fname is None:
        fname = url.split('/')[-1]

    # Check if the file already exists on disk
    if (not clobber) and os.path.isfile(fname):
        print('Checking existing file to see if MD5 sum matches ...')
        md5_existing = get_md5sum(fname, chunk_size=chunk_size)
        if md5_existing == md5sum:
            print('File exists. Not overwriting.')
            return fname

    # Make sure the directory it's going into exists.
    # NOTE: os.path.dirname() returns '' for a bare filename (the common
    # case when fname is inferred from the URL), and os.makedirs('')
    # raises -- only create the directory when there is one to create.
    dir_name = os.path.dirname(fname)
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)

    sig = hashlib.md5()

    if verbose:
        print('Downloading {} ...'.format(url))

    if url.startswith('http://') or url.startswith('https://'):
        # Stream the URL as a file, copying to local disk
        with contextlib.closing(requests.get(url, stream=True)) as r:
            try:
                r.raise_for_status()
            except requests.exceptions.HTTPError as error:
                print('Error connecting to URL: "{}"'.format(url))
                print(r.text)
                raise error

            with open(fname, 'wb') as f:
                content_length = r.headers.get('content-length')
                if content_length is not None:
                    content_length = int(content_length)
                # Create the bar unconditionally so it is always bound,
                # even when the server omits Content-Length.
                bar = FileTransferProgressBar(content_length)

                for k, chunk in enumerate(r.iter_content(chunk_size=chunk_size)):
                    f.write(chunk)
                    sig.update(chunk)

                    if verbose:
                        bar_val = chunk_size * (k + 1)
                        if content_length is not None:
                            bar_val = min(bar_val, content_length)
                        bar.update(bar_val)
    else:  # e.g., ftp://
        with contextlib.closing(urlopen(url)) as r:
            content_length = r.headers.get('content-length')
            if content_length is not None:
                content_length = int(content_length)
            # Same as above: keep `bar` bound even without Content-Length.
            bar = FileTransferProgressBar(content_length)

            with open(fname, 'wb') as f:
                k = 0
                while True:
                    chunk = r.read(chunk_size)
                    if not chunk:
                        break
                    f.write(chunk)
                    sig.update(chunk)

                    if verbose:
                        k += 1
                        bar_val = chunk_size * k
                        if content_length is not None:
                            bar_val = min(bar_val, content_length)
                        bar.update(bar_val)

    if sig.hexdigest() != md5sum:
        raise DownloadError('The MD5 sum of the downloaded file is incorrect.\n'
                            + '  download: {}\n'.format(sig.hexdigest())
                            + '  expected: {}\n'.format(md5sum))

    return fname
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def download(url, fname=None): """ Downloads a file. Args: url (str): The URL to download. fname (Optional[str]): The filename to store the downloaded file in. If `None`, take the filename from the URL. Defaults to `None`. Returns: The filename the URL was downloaded to. Raises: requests.exceptions.HTTPError: There was a problem connecting to the URL. """
def download(url, fname=None):
    """Download a file.

    Args:
        url (str): The URL to download.
        fname (Optional[str]): The filename to store the downloaded file
            in. If `None`, take the filename from the URL. Defaults to
            `None`.

    Returns:
        str: The filename the URL was downloaded to.

    Raises:
        requests.exceptions.HTTPError: There was a problem connecting to
            the URL.
    """
    # Infer a local filename from the URL when none was given
    if fname is None:
        fname = url.split('/')[-1]

    # Stream the URL as a file, copying to local disk
    with contextlib.closing(requests.get(url, stream=True)) as response:
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as error:
            print('Error connecting to URL: "{}"'.format(url))
            print(response.text)
            raise error

        with open(fname, 'wb') as f:
            shutil.copyfileobj(response.raw, f)

    return fname
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dataverse_download_doi(doi, local_fname=None, file_requirements={}, clobber=False): """ Downloads a file from the Dataverse, using a DOI and set of metadata parameters to locate the file. Args: doi (str): Digital Object Identifier (DOI) containing the file. local_fname (Optional[str]): Local filename to download the file to. If `None`, then use the filename provided by the Dataverse. Defaults to `None`. file_requirements (Optional[dict]): Select the file containing the given metadata entries. If multiple files meet these requirements, only the first in downloaded. Defaults to `{}`, corresponding to no requirements. Raises: DownloadError: Either no matching file was found under the given DOI, or the MD5 sum of the file was not as expected. requests.exceptions.HTTPError: The given DOI does not exist, or there was a problem connecting to the Dataverse. """
def dataverse_download_doi(doi, local_fname=None, file_requirements={}, clobber=False):
    """Download a file from the Dataverse, using a DOI and a set of metadata
    parameters to locate the file.

    Args:
        doi (str): Digital Object Identifier (DOI) containing the file.
        local_fname (Optional[str]): Local filename to download the file to.
            If `None`, use the filename provided by the Dataverse. Defaults
            to `None`.
        file_requirements (Optional[dict]): Select the file containing the
            given metadata entries. If multiple files meet the requirements,
            only the first is downloaded. Defaults to `{}` (no requirements).
        clobber (Optional[bool]): If `False` (the default), skip the
            download when a local file with a matching MD5 sum exists.

    Raises:
        DownloadError: Either no matching file was found under the given
            DOI, or the MD5 sum of the file was not as expected.
        requests.exceptions.HTTPError: The given DOI does not exist, or
            there was a problem connecting to the Dataverse.
    """
    metadata = dataverse_search_doi(doi)

    def requirements_match(file_metadata):
        for key in file_requirements.keys():
            if file_metadata['dataFile'].get(key, None) != file_requirements[key]:
                return False
        return True

    for file_metadata in metadata['data']['latestVersion']['files']:
        if requirements_match(file_metadata):
            file_id = file_metadata['dataFile']['id']
            md5sum = file_metadata['dataFile']['md5']

            if local_fname is None:
                local_fname = file_metadata['dataFile']['filename']

            # Check if the file already exists on disk
            if (not clobber) and os.path.isfile(local_fname):
                print('Checking existing file to see if MD5 sum matches ...')
                md5_existing = get_md5sum(local_fname)
                if md5_existing == md5sum:
                    print('File exists. Not overwriting.')
                    return

            print("Downloading data to '{}' ...".format(local_fname))

            dataverse_download_id(file_id, md5sum, fname=local_fname,
                                  clobber=False)

            return

    # BUGFIX: the previous version referenced the loop variable
    # `file_metadata` here, which is unbound (NameError) when the DOI has
    # no files at all, and otherwise showed only the *last* file's
    # metadata. Dump the full DOI metadata instead.
    raise DownloadError(
        'No file found under the given DOI matches the requirements.\n'
        'The metadata found for this DOI was:\n'
        + json.dumps(metadata, indent=2, sort_keys=True))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def address_reencode(address, blockchain='bitcoin', **blockchain_opts): """ Reencode an address """
def address_reencode(address, blockchain='bitcoin', **blockchain_opts):
    """Reencode an address for the given blockchain.

    :param address: address string to reencode
    :param blockchain: blockchain name (only ``'bitcoin'`` is supported)

    Raises:
        ValueError: If the blockchain is not supported.
    """
    if blockchain == 'bitcoin':
        return btc_address_reencode(address, **blockchain_opts)
    else:
        # Message normalized to match the sibling dispatchers
        # (is_multisig, is_singlesig, tx_extend, ...).
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_multisig(privkey_info, blockchain='bitcoin', **blockchain_opts): """ Is the given private key bundle a multisig bundle? """
def is_multisig(privkey_info, blockchain='bitcoin', **blockchain_opts):
    """Check whether a private key bundle is a multisig bundle.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_is_multisig(privkey_info, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_multisig_address(addr, blockchain='bitcoin', **blockchain_opts): """ Is the given address a multisig address? """
def is_multisig_address(addr, blockchain='bitcoin', **blockchain_opts):
    """Check whether an address is a multisig address.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_is_multisig_address(addr, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_multisig_script(script, blockchain='bitcoin', **blockchain_opts): """ Is the given script a multisig script? """
def is_multisig_script(script, blockchain='bitcoin', **blockchain_opts):
    """Check whether a script is a multisig script.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_is_multisig_script(script, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_singlesig(privkey_info, blockchain='bitcoin', **blockchain_opts): """ Is the given private key bundle a single-sig key bundle? """
def is_singlesig(privkey_info, blockchain='bitcoin', **blockchain_opts):
    """Check whether a private key bundle is a single-sig bundle.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_is_singlesig(privkey_info, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_singlesig_address(addr, blockchain='bitcoin', **blockchain_opts): """ Is the given address a single-sig address? """
def is_singlesig_address(addr, blockchain='bitcoin', **blockchain_opts):
    """Check whether an address is a single-sig address.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_is_singlesig_address(addr, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_privkey_address(privkey_info, blockchain='bitcoin', **blockchain_opts): """ Get the address from a private key bundle """
def get_privkey_address(privkey_info, blockchain='bitcoin', **blockchain_opts):
    """Derive the address from a private key bundle.

    Raises ValueError for unsupported blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_get_privkey_address(privkey_info, **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def apply_grad_cartesian_tensor(grad_X, zmat_dist): """Apply the gradient for transformation to cartesian space onto zmat_dist. Args: grad_X (:class:`numpy.ndarray`): A ``(3, n, n, 3)`` array. The mathematical details of the index layout is explained in :meth:`~chemcoord.Cartesian.get_grad_zmat()`. zmat_dist (:class:`~chemcoord.Zmat`): Distortions in Zmatrix space. Returns: :class:`~chemcoord.Cartesian`: Distortions in cartesian space. """
def apply_grad_cartesian_tensor(grad_X, zmat_dist):
    """Apply the gradient for the transformation to cartesian space
    onto *zmat_dist*.

    Args:
        grad_X (numpy.ndarray): A ``(3, n, n, 3)`` gradient tensor (see
            ``Cartesian.get_grad_zmat`` for the index layout).
        zmat_dist (Zmat): Distortions in Zmatrix space.

    Returns:
        Cartesian: Distortions in cartesian space.
    """
    dC = zmat_dist.loc[:, ['bond', 'angle', 'dihedral']].values.T
    try:
        # numeric entries: convert angle/dihedral rows from degrees to radians
        dC = dC.astype('f8')
        dC[[1, 2], :] = np.radians(dC[[1, 2], :])
    except (TypeError, AttributeError):
        # symbolic (sympy) entries: use sympy's degree->radian conversion
        dC[[1, 2], :] = sympy.rad(dC[[1, 2], :])

    dX = np.tensordot(grad_X, dC, axes=([3, 2], [0, 1])).T

    from chemcoord.cartesian_coordinates.cartesian_class_main import Cartesian
    return Cartesian(atoms=zmat_dist['atom'], coords=dX, index=zmat_dist.index)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register_model_converter(model, app): """Add url converter for model Example: class Student(db.model): id = Column(Integer, primary_key=True) name = Column(String(50)) register_model_converter(Student) @route('/classmates/<Student:classmate>') def get_classmate_info(classmate): pass This only support model's have single primary key. You need call this function before create view function. """
def register_model_converter(model, app):
    """Add a URL converter for *model*.

    Example:

        class Student(db.model):
            id = Column(Integer, primary_key=True)
            name = Column(String(50))

        register_model_converter(Student)

        @route('/classmates/<Student:classmate>')
        def get_classmate_info(classmate):
            pass

    Only models with a single primary key (an ``id`` attribute) are
    supported; models without one are silently skipped. Call this before
    creating the view function.
    """
    if not hasattr(model, 'id'):
        return

    class Converter(_ModelConverter):
        _model = model

    app.url_map.converters[model.__name__] = Converter
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def iupacify(self): """Give the IUPAC conform representation. Mathematically speaking the angles in a zmatrix are representations of an equivalence class. We will denote an equivalence relation with :math:`\\sim` and use :math:`\\alpha` for an angle and :math:`\\delta` for a dihedral angle. Then the following equations hold true. .. math:: (\\alpha, \\delta) &\sim (-\\alpha, \\delta + \\pi) \\\\ \\alpha &\sim \\alpha \\mod 2\\pi \\\\ \\delta &\sim \\delta \\mod 2\\pi `IUPAC <https://goldbook.iupac.org/html/T/T06406.html>`_ defines a designated representation of these equivalence classes, by asserting: .. math:: 0 \\leq &\\alpha \\leq \\pi \\\\ -\\pi \\leq &\\delta \\leq \\pi Args: None Returns: Zmat: Zmatrix with accordingly changed angles and dihedrals. """
def iupacify(self):
    """Give the IUPAC conform representation.

    Angles are folded into ``0 <= angle <= 180`` (compensating with a
    180 degree shift of the dihedral, since ``(a, d) ~ (-a, d + 180)``)
    and dihedrals into ``-180 <= dihedral <= 180``.

    Returns:
        Zmat: Zmatrix with accordingly changed angles and dihedrals.
    """
    def _fold_dihedral(value):
        # map onto the interval [-180, 180)
        folded = value % 360
        return folded - (folded // 180) * 360

    new = self.copy()

    new.unsafe_loc[:, 'angle'] = new['angle'] % 360
    select = new['angle'] > 180
    new.unsafe_loc[select, 'angle'] = new.loc[select, 'angle'] - 180
    # keep the geometry: a shifted angle requires a 180-degree dihedral shift
    new.unsafe_loc[select, 'dihedral'] = new.loc[select, 'dihedral'] + 180

    new.unsafe_loc[:, 'dihedral'] = _fold_dihedral(new.loc[:, 'dihedral'])
    return new
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def minimize_dihedrals(self): r"""Give a representation of the dihedral with minimized absolute value. Mathematically speaking the angles in a zmatrix are representations of an equivalence class. We will denote an equivalence relation with :math:`\sim` and use :math:`\alpha` for an angle and :math:`\delta` for a dihedral angle. Then the following equations hold true. .. math:: (\alpha, \delta) &\sim (-\alpha, \delta + \pi) \\ \alpha &\sim \alpha \mod 2\pi \\ \delta &\sim \delta \mod 2\pi This function asserts: .. math:: -\pi \leq \delta \leq \pi The main application of this function is the construction of a transforming movement from ``zmat1`` to ``zmat2``. This is under the assumption that ``zmat1`` and ``zmat2`` are the same molecules (regarding their topology) and have the same construction table (:meth:`~Cartesian.get_construction_table`): : with cc.TestOperators(False): D = zm2 - zm1 zmats1 = [zm1 + D * i / n for i in range(n)] zmats2 = [zm1 + D.minimize_dihedrals() * i / n for i in range(n)] The movement described by ``zmats1`` might be too large, because going from :math:`5^\circ` to :math:`355^\circ` is :math:`350^\circ` in this case and not :math:`-10^\circ` as in ``zmats2`` which is the desired :math:`\Delta` in most cases. Args: None Returns: Zmat: Zmatrix with accordingly changed angles and dihedrals. """
def minimize_dihedrals(self):
    """Give a representation of the dihedral with minimized absolute value.

    Dihedrals are folded into ``[-180, 180)``, which is useful when
    interpolating between two zmatrices: the folded difference describes
    the shortest rotation (e.g. -10 degrees instead of +350 degrees).

    Returns:
        Zmat: Zmatrix with accordingly changed dihedrals.
    """
    def _fold(value):
        # map onto the interval [-180, 180)
        folded = value % 360
        return folded - (folded // 180) * 360

    new = self.copy()
    new.unsafe_loc[:, 'dihedral'] = _fold(new.loc[:, 'dihedral'])
    return new
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def change_numbering(self, new_index=None): """Change numbering to a new index. Changes the numbering of index and all dependent numbering The user has to make sure that the new_index consists of distinct elements. Args: new_index (list): If None the new_index is taken from 1 to the number of atoms. Returns: Zmat: Reindexed version of the zmatrix. """
def change_numbering(self, new_index=None):
    """Change numbering to a new index.

    Changes the numbering of the index and all dependent references
    (the ``b``, ``a`` and ``d`` columns). The caller must ensure that
    *new_index* consists of distinct elements.

    Args:
        new_index (list): If None, the new index is 0..len(self)-1.

    Returns:
        Zmat: Reindexed version of the zmatrix.
    """
    if new_index is None:
        new_index = range(len(self))
    elif len(new_index) != len(self):
        raise ValueError('len(new_index) has to be the same as len(self)')

    refs = self.loc[:, ['b', 'a', 'd']]

    # pandas' .replace() is transitive for object columns but
    # non-transitive for all other dtypes (string columns are object
    # columns). See pandas issues #5338, #16051, #5541.
    # Work around it by mapping magic labels to ints first and doing the
    # replacement on integer columns only.
    refs = refs.replace(constants.int_label)
    try:
        refs = refs.astype('i8')
    except ValueError:
        raise ValueError('Due to a bug in pandas it is necessary to have '
                         'integer columns')
    refs = refs.replace(self.index, new_index)
    # restore the magic labels
    refs = refs.replace({v: k for k, v in constants.int_label.items()})

    out = self.copy()
    out.unsafe_loc[:, ['b', 'a', 'd']] = refs
    out._frame.index = new_index
    return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _insert_dummy_cart(self, exception, last_valid_cartesian=None): """Insert dummy atom into the already built cartesian of exception """
def _insert_dummy_cart(self, exception, last_valid_cartesian=None):
    """Insert a dummy atom into the already-built cartesian of *exception*."""
    def _reference_normal(cartesian, reference_labels):
        # unit normal of the plane spanned by the b->a and a->d vectors
        b_pos, a_pos, d_pos = cartesian._get_positions(reference_labels)
        BA = a_pos - b_pos
        AD = d_pos - a_pos
        normal = np.cross(BA, AD)
        return normal / np.linalg.norm(normal)

    def _with_dummy(cartesian, reference_labels, n1):
        cartesian = cartesian.copy()
        b_pos, a_pos, d_pos = cartesian._get_positions(reference_labels)
        BA = a_pos - b_pos
        # place the dummy one unit away from `a`, perpendicular to both
        # n1 and BA
        N2 = np.cross(n1, BA)
        n2 = N2 / np.linalg.norm(N2)
        i_dummy = max(self.index) + 1
        cartesian.loc[i_dummy, 'atom'] = 'X'
        cartesian.loc[i_dummy, ['x', 'y', 'z']] = a_pos + n2
        return cartesian, i_dummy

    if last_valid_cartesian is None:
        last_valid_cartesian = self._metadata['last_valid_cartesian']
    ref_labels = self.loc[exception.index, ['b', 'a', 'd']]
    n1 = _reference_normal(last_valid_cartesian, ref_labels)
    return _with_dummy(exception.already_built_cartesian, ref_labels, n1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_cartesian(self): """Return the molecule in cartesian coordinates. Raises an :class:`~exceptions.InvalidReference` exception, if the reference of the i-th atom is undefined. Args: None Returns: Cartesian: Reindexed version of the zmatrix. """
def get_cartesian(self):
    """Return the molecule in cartesian coordinates.

    Raises an ``InvalidReference`` exception if the reference of the
    i-th atom is undefined; the exception carries the partially built
    cartesian.

    Returns:
        Cartesian: The molecule in cartesian coordinates.
    """
    def _build(positions, n_rows):
        # wrap the first n_rows computed positions in a Cartesian
        frame = pd.DataFrame(columns=['atom', 'x', 'y', 'z'],
                             index=self.index[:n_rows], dtype='f8')
        frame['atom'] = self.loc[frame.index, 'atom']
        frame.loc[:, ['x', 'y', 'z']] = positions[:n_rows]
        from chemcoord.cartesian_coordinates.cartesian_class_main \
            import Cartesian
        return Cartesian(frame, metadata=self.metadata)

    # reference columns: magic labels -> ints, then labels -> positions
    refs = self.loc[:, ['b', 'a', 'd']]
    refs = refs.replace(constants.int_label)
    refs = refs.replace({label: pos for pos, label in enumerate(refs.index)})
    refs = refs.values.astype('i8').T

    C = self.loc[:, ['bond', 'angle', 'dihedral']].values.T
    C[[1, 2], :] = np.radians(C[[1, 2], :])

    err, row, positions = transformation.get_X(C, refs)
    positions = positions.T

    if err == ERR_CODE_InvalidReference:
        rename = dict(enumerate(self.index))
        i = rename[row]
        b, a, d = self.loc[i, ['b', 'a', 'd']]
        raise InvalidReference(i=i, b=b, a=a, d=d,
                               already_built_cartesian=_build(positions, row))
    elif err == ERR_CODE_OK:
        return _build(positions, row + 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_grad_cartesian(self, as_function=True, chain=True, drop_auto_dummies=True): r"""Return the gradient for the transformation to a Cartesian. If ``as_function`` is True, a function is returned that can be directly applied onto instances of :class:`~Zmat`, which contain the applied distortions in Zmatrix space. In this case the user does not have to worry about indexing and correct application of the tensor product. Basically this is the function :func:`zmat_functions.apply_grad_cartesian_tensor` with partially replaced arguments. If ``as_function`` is False, a ``(3, n, n, 3)`` tensor is returned, which contains the values of the derivatives. Since a ``n * 3`` matrix is deriven after a ``n * 3`` matrix, it is important to specify the used rules for indexing the resulting tensor. The rule is very simple: The indices of the numerator are used first then the indices of the denominator get swapped and appended: .. math:: \left( \frac{\partial \mathbf{Y}}{\partial \mathbf{X}} \right)_{i, j, k, l} = \frac{\partial \mathbf{Y}_{i, j}}{\partial \mathbf{X}_{l, k}} Applying this rule to an example function: .. math:: f \colon \mathbb{R}^3 \rightarrow \mathbb{R} Gives as derivative the known row-vector gradient: .. math:: (\nabla f)_{1, i} = \frac{\partial f}{\partial x_i} \qquad i \in \{1, 2, 3\} .. note:: The row wise alignment of the zmat files makes sense for these CSV like files. But it is mathematically advantageous and sometimes (depending on the memory layout) numerically better to use a column wise alignment of the coordinates. In this function the resulting tensor assumes a ``3 * n`` array for the coordinates. If .. 
math:: \mathbf{C}_{i, j} &\qquad 1 \leq i \leq 3, \quad 1 \leq j \leq n \\ \mathbf{X}_{i, j} &\qquad 1 \leq i \leq 3, \quad 1 \leq j \leq n denote the positions in Zmatrix and cartesian space, The complete tensor may be written as: .. math:: \left( \frac{\partial \mathbf{X}}{\partial \mathbf{C}} \right)_{i, j, k, l} = \frac{\partial \mathbf{X}_{i, j}}{\partial \mathbf{C}_{l, k}} Args: construction_table (pandas.DataFrame): as_function (bool): Return a tensor or :func:`xyz_functions.apply_grad_zmat_tensor` with partially replaced arguments. chain (bool): drop_auto_dummies (bool): Drop automatically created dummies from the gradient. This means, that only changes in regularly placed atoms are considered for the gradient. Returns: (func, :class:`numpy.ndarray`): Depending on ``as_function`` return a tensor or :func:`~chemcoord.zmat_functions.apply_grad_cartesian_tensor` with partially replaced arguments. """
def get_grad_cartesian(self, as_function=True, chain=True,
                       drop_auto_dummies=True):
    """Return the gradient for the transformation to a Cartesian.

    Args:
        as_function (bool): If True (default), return
            ``apply_grad_cartesian_tensor`` with the gradient tensor
            partially applied; otherwise return the raw ``(3, n, n, 3)``
            tensor.
        chain (bool): Passed through to ``transformation.get_grad_X``.
        drop_auto_dummies (bool): Drop automatically created dummy atoms
            from the gradient, so only regularly placed atoms contribute.

    Returns:
        (func | numpy.ndarray): See ``as_function``.
    """
    zmat = self.change_numbering()
    refs = zmat.loc[:, ['b', 'a', 'd']].replace(constants.int_label).values.T
    C = zmat.loc[:, ['bond', 'angle', 'dihedral']].values.T
    if C.dtype == np.dtype('i8'):
        C = C.astype('f8')
    C[[1, 2], :] = np.radians(C[[1, 2], :])

    grad_X = transformation.get_grad_X(C, refs, chain=chain)

    if drop_auto_dummies:
        # map original labels to positional indices of the renumbered zmat
        rename = dict(zip(self.index, range(len(self))))
        dummies = [rename[v['dummy_d']]
                   for v in self._metadata['has_dummies'].values()]
        keep = np.full(grad_X.shape[1], True)
        keep[dummies] = False
        coord_axes = np.full(3, True)
        grad_X = grad_X[np.ix_(coord_axes, keep, keep, coord_axes)]

    if not as_function:
        return grad_X
    from chemcoord.internal_coordinates.zmat_functions import (
        apply_grad_cartesian_tensor)
    return partial(apply_grad_cartesian_tensor, grad_X)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tx_extend(partial_tx_hex, new_inputs, new_outputs, blockchain='bitcoin', **blockchain_opts): """ Add a set of inputs and outputs to a tx. Return the new tx on success Raise on error """
def tx_extend(partial_tx_hex, new_inputs, new_outputs, blockchain='bitcoin',
              **blockchain_opts):
    """Add a set of inputs and outputs to a tx.

    Returns the new tx on success; raises ValueError for unsupported
    blockchains.
    """
    if blockchain != 'bitcoin':
        raise ValueError('Unknown blockchain "{}"'.format(blockchain))
    return btc_tx_extend(partial_tx_hex, new_inputs, new_outputs,
                         **blockchain_opts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setcontext(context, _local=local): """ Set the current context to that given. Attributes provided by ``context`` override those in the current context. If ``context`` doesn't specify a particular attribute, the attribute from the current context shows through. """
def setcontext(context, _local=local):
    """Set the current context to that given.

    Attributes provided by *context* override those in the current
    context; unspecified attributes show through from the current
    context.
    """
    merged = getcontext() + context
    _local.__bigfloat_context__ = merged
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _apply_function_in_context(cls, f, args, context): """ Apply an MPFR function 'f' to the given arguments 'args', rounding to the given context. Returns a new Mpfr object with precision taken from the current context. """
def _apply_function_in_context(cls, f, args, context):
    """Apply the MPFR function *f* to *args*, rounding per *context*.

    Returns a new Mpfr object with precision taken from *context*.
    """
    rounding = context.rounding
    result = mpfr.Mpfr_t.__new__(cls)
    mpfr.mpfr_init2(result, context.precision)
    ternary = f(*((result,) + args + (rounding,)))
    with _temporary_exponent_bounds(context.emin, context.emax):
        ternary = mpfr.mpfr_check_range(result, ternary, rounding)
        if context.subnormalize:
            # mpfr_subnormalize doesn't set underflow and subnormal
            # flags itself, so do it here.  Set the underflow flag for
            # *all* cases where the 'after rounding' result is smaller
            # than the smallest normal number, even if that result is
            # exact.  (If result is zero but ternary is nonzero, the
            # underflow flag was already set by mpfr_check_range.)
            underflow = (
                mpfr.mpfr_number_p(result) and
                not mpfr.mpfr_zero_p(result) and
                mpfr.mpfr_get_exp(result) < context.precision - 1 + context.emin)
            if underflow:
                mpfr.mpfr_set_underflow()
            ternary = mpfr.mpfr_subnormalize(result, ternary, rounding)
        if ternary:
            mpfr.mpfr_set_inexflag()
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_logger(name=None): """ Get virtualchain's logger """
def get_logger(name=None):
    """Get virtualchain's logger.

    Returns a non-propagating logger with a single stream handler whose
    level and format depend on the module-level ``DEBUG`` flag.
    """
    level = logging.CRITICAL
    if DEBUG:
        logging.disable(logging.NOTSET)
        level = logging.DEBUG

    if name is None:
        name = "<unknown>"

    log = logging.getLogger(name=name)
    log.setLevel(level)
    console = logging.StreamHandler()
    console.setLevel(level)
    log_format = ('[%(asctime)s] [%(levelname)s] [%(module)s:%(lineno)d] ('
                  + str(os.getpid()) + '.%(thread)d) %(message)s'
                  if DEBUG else '%(message)s')
    formatter = logging.Formatter(log_format)
    console.setFormatter(formatter)
    log.propagate = False

    # Drop handlers left over from previous get_logger() calls so
    # messages are not emitted multiple times.  `del log.handlers[:]`
    # replaces the previous Python-2-only xrange()/pop() loop and works
    # on both Python 2 and 3.
    del log.handlers[:]

    log.addHandler(console)
    return log
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_config_filename(impl, working_dir): """ Get the absolute path to the config file. """
def get_config_filename(impl, working_dir):
    """Return the absolute path to the chain's config file."""
    basename = impl.get_virtual_chain_name() + ".ini"
    return os.path.join(working_dir, basename)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_db_filename(impl, working_dir): """ Get the absolute path to the last-block file. """
def get_db_filename(impl, working_dir):
    """Return the absolute path to the chain's state database file."""
    basename = impl.get_virtual_chain_name() + ".db"
    return os.path.join(working_dir, basename)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_snapshots_filename(impl, working_dir): """ Get the absolute path to the chain's consensus snapshots file. """
def get_snapshots_filename(impl, working_dir):
    """Return the absolute path to the chain's consensus snapshots file."""
    basename = impl.get_virtual_chain_name() + ".snapshots"
    return os.path.join(working_dir, basename)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_lockfile_filename(impl, working_dir): """ Get the absolute path to the chain's indexing lockfile """
def get_lockfile_filename(impl, working_dir):
    """Return the absolute path to the chain's indexing lockfile."""
    basename = impl.get_virtual_chain_name() + ".lock"
    return os.path.join(working_dir, basename)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_bitcoind_config(config_file=None, impl=None): """ Set bitcoind options globally. Call this before trying to talk to bitcoind. """
def get_bitcoind_config(config_file=None, impl=None):
    """Load bitcoind connection options.

    Reads the ``[bitcoind]`` section of *config_file* when present;
    otherwise falls back to the built-in public-node defaults.  Call
    this before trying to talk to bitcoind.
    """
    loaded = False

    server = None
    port = None
    user = None
    passwd = None
    timeout = None
    use_regtest = None
    p2p_port = None
    spv_path = None
    regtest_flag = None

    if config_file is not None:
        cp = SafeConfigParser()
        cp.read(config_file)

        if cp.has_section('bitcoind'):
            if cp.has_option('bitcoind', 'server'):
                server = cp.get('bitcoind', 'server')
            if cp.has_option('bitcoind', 'port'):
                port = int(cp.get('bitcoind', 'port'))
            if cp.has_option('bitcoind', 'p2p_port'):
                p2p_port = int(cp.get('bitcoind', 'p2p_port'))
            if cp.has_option('bitcoind', 'user'):
                user = cp.get('bitcoind', 'user')
            if cp.has_option('bitcoind', 'passwd'):
                passwd = cp.get('bitcoind', 'passwd')
            if cp.has_option('bitcoind', 'spv_path'):
                spv_path = cp.get('bitcoind', 'spv_path')

            if cp.has_option('bitcoind', 'regtest'):
                regtest_flag = cp.get('bitcoind', 'regtest')
            else:
                regtest_flag = 'no'

            if cp.has_option('bitcoind', 'timeout'):
                timeout = float(cp.get('bitcoind', 'timeout'))

            use_regtest = regtest_flag.lower() in ["yes", "y", "true", "1", "on"]
            loaded = True

    if not loaded:
        # no config file (or no [bitcoind] section): use public defaults
        server = 'bitcoin.blockstack.com'
        port = 8332
        user = 'blockstack'
        passwd = 'blockstacksystem'
        use_regtest = False
        timeout = 300
        p2p_port = 8333
        spv_path = os.path.expanduser("~/.virtualchain-spv-headers.dat")

    return {
        "bitcoind_user": user,
        "bitcoind_passwd": passwd,
        "bitcoind_server": server,
        "bitcoind_port": port,
        "bitcoind_timeout": timeout,
        "bitcoind_regtest": use_regtest,
        "bitcoind_p2p_port": p2p_port,
        "bitcoind_spv_path": spv_path,
    }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_empty(self): """Returns True if all child date elements present are empty and other nodes are not set. Returns False if any child date elements are not empty or other nodes are set."""
def is_empty(self):
    """Return True if both child date elements (created, issued) are
    empty and no publisher is set; False otherwise."""
    dates_empty = self.created.is_empty() and self.issued.is_empty()
    return dates_empty and not self.publisher
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def is_empty(self):
    '''Returns True if all titleInfo subfields are not set or empty;
    returns False if any of the fields are not empty.'''
    # empty exactly when no subfield holds a truthy value
    subfields = (self.title, self.subtitle, self.part_number,
                 self.part_name, self.non_sort, self.type)
    return not any(subfields)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def is_empty(self):
    '''Returns True if details, extent, and type are not set or return
    True for ``is_empty``; returns False if any of the fields are not
    empty.'''
    # only consult details/extent when they are present at all
    present = [f for f in (self.details, self.extent) if f is not None]
    return all(f.is_empty() for f in present) and not self.type
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getinfo(self): """ Backwards-compatibility for 0.14 and later """
try: old_getinfo = AuthServiceProxy(self.__service_url, 'getinfo', self.__timeout, self.__conn, True) res = old_getinfo() if 'error' not in res: # 0.13 and earlier return res except JSONRPCException: pass network_info = self.getnetworkinfo() blockchain_info = self.getblockchaininfo() try: wallet_info = self.getwalletinfo() except: wallet_info = { 'walletversion': None, 'balance': None, 'keypoololdest': None, 'keypoolsize': None, 'paytxfee': None, } res = { 'version': network_info['version'], 'protocolversion': network_info['protocolversion'], 'walletversion': wallet_info['walletversion'], 'balance': wallet_info['balance'], 'blocks': blockchain_info['blocks'], 'timeoffset': network_info['timeoffset'], 'connections': network_info['connections'], 'proxy': network_info['networks'], 'difficulty': blockchain_info['difficulty'], 'testnet': blockchain_info['chain'] == 'testnet', 'keypoololdest': wallet_info['keypoololdest'], 'keypoolsize': wallet_info['keypoolsize'], 'paytxfee': wallet_info['paytxfee'], 'errors': network_info['warnings'], } for k in ['unlocked_until', 'relayfee', 'paytxfee']: if wallet_info.has_key(k): res[k] = wallet_info[k] return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_payment_script( address, segwit=None, **ignored ): """ Make a pay-to-address script. """
if segwit is None: segwit = get_features('segwit') # is address bech32-encoded? witver, withash = segwit_addr_decode(address) if witver is not None and withash is not None: # bech32 segwit address if not segwit: raise ValueError("Segwit is disabled") if len(withash) == 20: # p2wpkh script_hex = '0014' + withash.encode('hex') return script_hex elif len(withash) == 32: # p2wsh script_hex = '0020' + withash.encode('hex') return script_hex else: raise ValueError("Unrecognized address '%s'" % address ) else: # address is b58check-encoded vb = keylib.b58check.b58check_version_byte(address) if vb == version_byte: # p2pkh hash160 = binascii.hexlify( keylib.b58check.b58check_decode(address) ) script = 'OP_DUP OP_HASH160 {} OP_EQUALVERIFY OP_CHECKSIG'.format(hash160) script_hex = btc_script_to_hex(script) return script_hex elif vb == multisig_version_byte: # p2sh hash160 = binascii.hexlify( keylib.b58check.b58check_decode(address) ) script = 'OP_HASH160 {} OP_EQUAL'.format(hash160) script_hex = btc_script_to_hex(script) return script_hex else: raise ValueError("Unrecognized address '%s'" % address )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_data_script( data, **ignored ): """ Make a data-bearing transaction output. Data must be a hex string Returns a hex string. """
if len(data) >= MAX_DATA_LEN * 2: raise ValueError("Data hex string is too long") # note: data is a hex string if len(data) % 2 != 0: raise ValueError("Data hex string is not even length") return "6a{:02x}{}".format(len(data)/2, data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_p2sh_address( script_hex ): """ Make a P2SH address from a hex script """
h = hashing.bin_hash160(binascii.unhexlify(script_hex)) addr = bin_hash160_to_address(h, version_byte=multisig_version_byte) return addr
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_p2wpkh_address( pubkey_hex ): """ Make a p2wpkh address from a hex pubkey """
pubkey_hex = keylib.key_formatting.compress(pubkey_hex) hash160_bin = hashing.bin_hash160(pubkey_hex.decode('hex')) return segwit_addr_encode(hash160_bin)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_p2sh_p2wpkh_redeem_script( pubkey_hex ): """ Make the redeem script for a p2sh-p2wpkh witness script """
pubkey_hash = hashing.bin_hash160(pubkey_hex.decode('hex')).encode('hex') redeem_script = btc_script_serialize(['0014' + pubkey_hash]) return redeem_script
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_make_p2sh_p2wsh_redeem_script( witness_script_hex ): """ Make the redeem script for a p2sh-p2wsh witness script """
witness_script_hash = hashing.bin_sha256(witness_script_hex.decode('hex')).encode('hex') redeem_script = btc_script_serialize(['0020' + witness_script_hash]) return redeem_script
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_p2sh_address( address ): """ Is the given address a p2sh address? """
vb = keylib.b58check.b58check_version_byte( address ) if vb == multisig_version_byte: return True else: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_p2pkh_address( address ): """ Is the given address a p2pkh address? """
vb = keylib.b58check.b58check_version_byte( address ) if vb == version_byte: return True else: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_p2wpkh_address( address ): """ Is the given address a p2wpkh address? """
wver, whash = segwit_addr_decode(address) if whash is None: return False if len(whash) != 20: return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_p2wsh_address( address ): """ Is the given address a p2wsh address? """
wver, whash = segwit_addr_decode(address) if whash is None: return False if len(whash) != 32: return False return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_p2sh_script( script_hex ): """ Is the given scriptpubkey a p2sh script? """
if script_hex.startswith("a914") and script_hex.endswith("87") and len(script_hex) == 46: return True else: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_address_reencode( address, **blockchain_opts ): """ Depending on whether or not we're in testnet or mainnet, re-encode an address accordingly. """
# re-encode bitcoin address network = blockchain_opts.get('network', None) opt_version_byte = blockchain_opts.get('version_byte', None) if btc_is_segwit_address(address): # bech32 address hrp = None if network == 'mainnet': hrp = 'bc' elif network == 'testnet': hrp = 'tb' else: if os.environ.get('BLOCKSTACK_TESTNET') == '1' or os.environ.get('BLOCKSTACK_TESTNET3') == '1': hrp = 'tb' else: hrp = 'bc' wver, whash = segwit_addr_decode(address) return segwit_addr_encode(whash, hrp=hrp, witver=wver) else: # base58 address vb = keylib.b58check.b58check_version_byte( address ) if network == 'mainnet': if vb == 0 or vb == 111: vb = 0 elif vb == 5 or vb == 196: vb = 5 else: raise ValueError("Unrecognized address %s" % address) elif network == 'testnet': if vb == 0 or vb == 111: vb = 111 elif vb == 5 or vb == 196: vb = 196 else: raise ValueError("Unrecognized address %s" % address) else: if opt_version_byte is not None: vb = opt_version_byte elif os.environ.get("BLOCKSTACK_TESTNET") == "1" or os.environ.get("BLOCKSTACK_TESTNET3") == "1": if vb == 0 or vb == 111: # convert to testnet p2pkh vb = 111 elif vb == 5 or vb == 196: # convert to testnet p2sh vb = 196 else: raise ValueError("unrecognized address %s" % address) else: if vb == 0 or vb == 111: # convert to mainnet p2pkh vb = 0 elif vb == 5 or vb == 196: # convert to mainnet p2sh vb = 5 else: raise ValueError("unrecognized address %s" % address) return keylib.b58check.b58check_encode( keylib.b58check.b58check_decode(address), vb )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_is_singlesig_segwit(privkey_info): """ Is the given key bundle a p2sh-p2wpkh key bundle? """
try: jsonschema.validate(privkey_info, PRIVKEY_MULTISIG_SCHEMA) if len(privkey_info['private_keys']) > 1: return False return privkey_info.get('segwit', False) except ValidationError: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def segwit_addr_encode(witprog_bin, hrp=bech32_prefix, witver=bech32_witver): """ Encode a segwit script hash to a bech32 address. Returns the bech32-encoded string on success """
witprog_bytes = [ord(c) for c in witprog_bin] ret = bech32_encode(hrp, [int(witver)] + convertbits(witprog_bytes, 8, 5)) assert segwit_addr_decode(hrp, ret) is not (None, None) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_git_describe(git_str, pep440=False): """format the result of calling 'git describe' as a python version"""
if git_str is None: return None if "-" not in git_str: # currently at a tag return git_str else: # formatted as version-N-githash # want to convert to version.postN-githash git_str = git_str.replace("-", ".post", 1) if pep440: # does not allow git hash afterwards return git_str.split("-")[0] else: return git_str.replace("-g", "+git")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_release_version(): """Update VERSION file"""
version = get_version(pep440=True) with open(VERSION_FILE, "w") as outfile: outfile.write(version) outfile.write("\n")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_version(pep440=False): """Tracks the version number. pep440: bool When True, this function returns a version string suitable for a release as defined by PEP 440. When False, the githash (if available) will be appended to the version string. The file VERSION holds the version information. If this is not a git repository, then it is reasonable to assume that the version is not being incremented and the version returned will be the release version as read from the file. However, if the script is located within an active git repository, git-describe is used to get the version information. The file VERSION will need to be changed by manually. This should be done before running git tag (set to the same as the version in the tag). """
git_version = format_git_describe(call_git_describe(), pep440=pep440) if git_version is None: # not a git repository return read_release_version() return git_version
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def call_git_branch(): """return the name of the currently checked-out git branch"""
try: with open(devnull, "w") as fnull: arguments = [GIT_COMMAND, 'rev-parse', '--abbrev-ref', 'HEAD'] return check_output(arguments, cwd=CURRENT_DIRECTORY, stderr=fnull).decode("ascii").strip() except (OSError, CalledProcessError): return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_xmlobject_from_string(string, xmlclass=XmlObject, validate=False, resolver=None): """Initialize an XmlObject from a string. If an xmlclass is specified, construct an instance of that class instead of :class:`~eulxml.xmlmap.XmlObject`. It should be a subclass of XmlObject. The constructor will be passed a single node. If validation is requested and the specified subclass of :class:`XmlObject` has an XSD_SCHEMA defined, the parser will be configured to validate against the specified schema. Otherwise, the parser will be configured to use DTD validation, and expect a Doctype declaration in the xml content. :param string: xml content to be loaded, as a string :param xmlclass: subclass of :class:`~eulxml.xmlmap.XmlObject` to initialize :param validate: boolean, enable validation; defaults to false :rtype: instance of :class:`~eulxml.xmlmap.XmlObject` requested """
parser = _get_xmlparser(xmlclass=xmlclass, validate=validate, resolver=resolver) element = etree.fromstring(string, parser) return xmlclass(element)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_xmlobject_from_file(filename, xmlclass=XmlObject, validate=False, resolver=None): """Initialize an XmlObject from a file. See :meth:`load_xmlobject_from_string` for more details; behaves exactly the same, and accepts the same parameters, except that it takes a filename instead of a string. :param filename: name of the file that should be loaded as an xmlobject. :meth:`etree.lxml.parse` will accept a file name/path, a file object, a file-like object, or an HTTP or FTP url, however file path and URL are recommended, as they are generally faster for lxml to handle. """
parser = _get_xmlparser(xmlclass=xmlclass, validate=validate, resolver=resolver) tree = etree.parse(filename, parser) return xmlclass(tree.getroot())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_zmat(cls, inputfile, implicit_index=True): """Reads a zmat file. Lines beginning with ``#`` are ignored. Args: inputfile (str): implicit_index (bool): If this option is true the first column has to be the element symbols for the atoms. The row number is used to determine the index. Returns: Zmat: """
cols = ['atom', 'b', 'bond', 'a', 'angle', 'd', 'dihedral'] if implicit_index: zmat_frame = pd.read_table(inputfile, comment='#', delim_whitespace=True, names=cols) zmat_frame.index = range(1, len(zmat_frame) + 1) else: zmat_frame = pd.read_table(inputfile, comment='#', delim_whitespace=True, names=['temp_index'] + cols) zmat_frame.set_index('temp_index', drop=True, inplace=True) zmat_frame.index.name = None if pd.isnull(zmat_frame.iloc[0, 1]): zmat_values = [1.27, 127., 127.] zmat_refs = [constants.int_label[x] for x in ['origin', 'e_z', 'e_x']] for row, i in enumerate(zmat_frame.index[:3]): cols = ['b', 'a', 'd'] zmat_frame.loc[:, cols] = zmat_frame.loc[:, cols].astype('O') if row < 2: zmat_frame.loc[i, cols[row:]] = zmat_refs[row:] zmat_frame.loc[i, ['bond', 'angle', 'dihedral'][row:] ] = zmat_values[row:] else: zmat_frame.loc[i, 'd'] = zmat_refs[2] zmat_frame.loc[i, 'dihedral'] = zmat_values[2] elif zmat_frame.iloc[0, 1] in constants.int_label.keys(): zmat_frame = zmat_frame.replace( {col: constants.int_label for col in ['b', 'a', 'd']}) zmat_frame = cls._cast_correct_types(zmat_frame) try: Zmat = cls(zmat_frame) except InvalidReference: raise UndefinedCoordinateSystem( 'Your zmatrix cannot be transformed to cartesian coordinates') return Zmat
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_zmat(self, buf=None, upper_triangle=True, implicit_index=True, float_format='{:.6f}'.format, overwrite=True, header=False): """Write zmat-file Args: buf (str): StringIO-like, optional buffer to write to implicit_index (bool): If implicit_index is set, the zmat indexing is changed to ``range(1, len(self) + 1)``. Using :meth:`~chemcoord.Zmat.change_numbering` Besides the index is omitted while writing which means, that the index is given implicitly by the row number. float_format (one-parameter function): Formatter function to apply to column’s elements if they are floats. The result of this function must be a unicode string. overwrite (bool): May overwrite existing files. Returns: formatted : string (or unicode, depending on data and options) """
out = self.copy() if implicit_index: out = out.change_numbering(new_index=range(1, len(self) + 1)) if not upper_triangle: out = out._remove_upper_triangle() output = out.to_string(index=(not implicit_index), float_format=float_format, header=header) if buf is not None: if overwrite: with open(buf, mode='w') as f: f.write(output) else: with open(buf, mode='x') as f: f.write(output) else: return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_xyz(self, buf=None, sort_index=True, index=False, header=False, float_format='{:.6f}'.format, overwrite=True): """Write xyz-file Args: buf (str): StringIO-like, optional buffer to write to sort_index (bool): If sort_index is true, the :class:`~chemcoord.Cartesian` is sorted by the index before writing. float_format (one-parameter function): Formatter function to apply to column’s elements if they are floats. The result of this function must be a unicode string. overwrite (bool): May overwrite existing files. Returns: formatted : string (or unicode, depending on data and options) """
if sort_index: molecule_string = self.sort_index().to_string( header=header, index=index, float_format=float_format) else: molecule_string = self.to_string(header=header, index=index, float_format=float_format) # NOTE the following might be removed in the future # introduced because of formatting bug in pandas # See https://github.com/pandas-dev/pandas/issues/13032 space = ' ' * (self.loc[:, 'atom'].str.len().max() - len(self.iloc[0, 0])) output = '{n}\n{message}\n{alignment}{frame_string}'.format( n=len(self), alignment=space, frame_string=molecule_string, message='Created by chemcoord http://chemcoord.readthedocs.io/') if buf is not None: if overwrite: with open(buf, mode='w') as f: f.write(output) else: with open(buf, mode='x') as f: f.write(output) else: return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_xyz(cls, buf, start_index=0, get_bonds=True, nrows=None, engine=None): """Read a file of coordinate information. Reads xyz-files. Args: inputfile (str): start_index (int): get_bonds (bool): nrows (int): Number of rows of file to read. Note that the first two rows are implicitly excluded. engine (str): Wrapper for argument of :func:`pandas.read_csv`. Returns: Cartesian: """
frame = pd.read_table(buf, skiprows=2, comment='#', nrows=nrows, delim_whitespace=True, names=['atom', 'x', 'y', 'z'], engine=engine) remove_digits = partial(re.sub, r'[0-9]+', '') frame['atom'] = frame['atom'].apply(remove_digits) molecule = cls(frame) molecule.index = range(start_index, start_index + len(molecule)) if get_bonds: molecule.get_bonds(use_lookup=False, set_lookup=True) return molecule
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_cjson(self, buf=None, **kwargs): """Write a cjson file or return dictionary. The cjson format is specified `here <https://github.com/OpenChemistry/chemicaljson>`_. Args: buf (str): If it is a filepath, the data is written to filepath. If it is None, a dictionary with the cjson information is returned. kwargs: The keyword arguments are passed into the ``dump`` function of the `json library <https://docs.python.org/3/library/json.html>`_. Returns: dict: """
cjson_dict = {'chemical json': 0} cjson_dict['atoms'] = {} atomic_number = constants.elements['atomic_number'].to_dict() cjson_dict['atoms'] = {'elements': {}} cjson_dict['atoms']['elements']['number'] = [ int(atomic_number[x]) for x in self['atom']] cjson_dict['atoms']['coords'] = {} coords = self.loc[:, ['x', 'y', 'z']].values.reshape(len(self) * 3) cjson_dict['atoms']['coords']['3d'] = [float(x) for x in coords] bonds = [] bond_dict = self.get_bonds() for i in bond_dict: for b in bond_dict[i]: bonds += [int(i), int(b)] bond_dict[b].remove(i) cjson_dict['bonds'] = {'connections': {}} cjson_dict['bonds']['connections']['index'] = bonds if buf is not None: with open(buf, mode='w') as f: f.write(json.dumps(cjson_dict, **kwargs)) else: return cjson_dict
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_cjson(cls, buf): """Read a cjson file or a dictionary. The cjson format is specified `here <https://github.com/OpenChemistry/chemicaljson>`_. Args: buf (str, dict): If it is a filepath, the data is read from filepath. If it is a dictionary, the dictionary is interpreted as cjson. Returns: Cartesian: """
if isinstance(buf, dict): data = buf.copy() else: with open(buf, 'r') as f: data = json.load(f) assert data['chemical json'] == 0 n_atoms = len(data['atoms']['coords']['3d']) metadata = {} _metadata = {} coords = np.array( data['atoms']['coords']['3d']).reshape((n_atoms // 3, 3)) atomic_number = constants.elements['atomic_number'] elements = [dict(zip(atomic_number, atomic_number.index))[x] for x in data['atoms']['elements']['number']] try: connections = data['bonds']['connections']['index'] except KeyError: pass else: bond_dict = defaultdict(set) for i, b in zip(connections[::2], connections[1::2]): bond_dict[i].add(b) bond_dict[b].add(i) _metadata['bond_dict'] = dict(bond_dict) try: metadata.update(data['properties']) except KeyError: pass out = cls(atoms=elements, coords=coords, _metadata=_metadata, metadata=metadata) return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def view(self, viewer=None, use_curr_dir=False): """View your molecule. .. note:: This function writes a temporary file and opens it with an external viewer. If you modify your molecule afterwards you have to recall view in order to see the changes. Args: viewer (str): The external viewer to use. If it is None, the default as specified in cc.settings['defaults']['viewer'] is used. use_curr_dir (bool): If True, the temporary file is written to the current diretory. Otherwise it gets written to the OS dependendent temporary directory. Returns: None: """
if viewer is None: viewer = settings['defaults']['viewer'] if use_curr_dir: TEMP_DIR = os.path.curdir else: TEMP_DIR = tempfile.gettempdir() def give_filename(i): filename = 'ChemCoord_' + str(i) + '.xyz' return os.path.join(TEMP_DIR, filename) i = 1 while os.path.exists(give_filename(i)): i = i + 1 self.to_xyz(give_filename(i)) def open_file(i): """Open file and close after being finished.""" try: subprocess.check_call([viewer, give_filename(i)]) except (subprocess.CalledProcessError, FileNotFoundError): raise finally: if use_curr_dir: pass else: os.remove(give_filename(i)) Thread(target=open_file, args=(i,)).start()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_pymatgen_molecule(self): """Create a Molecule instance of the pymatgen library .. warning:: The `pymatgen library <http://pymatgen.org>`_ is imported locally in this function and will raise an ``ImportError`` exception, if it is not installed. Args: None Returns: :class:`pymatgen.core.structure.Molecule`: """
from pymatgen import Molecule return Molecule(self['atom'].values, self.loc[:, ['x', 'y', 'z']].values)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_pymatgen_molecule(cls, molecule): """Create an instance of the own class from a pymatgen molecule Args: molecule (:class:`pymatgen.core.structure.Molecule`): Returns: Cartesian: """
new = cls(atoms=[el.value for el in molecule.species], coords=molecule.cart_coords) return new._to_numeric()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_ase_atoms(cls, atoms): """Create an instance of the own class from an ase molecule Args: molecule (:class:`ase.atoms.Atoms`): Returns: Cartesian: """
    # ASE exposes element symbols and Cartesian positions directly on the
    # Atoms object; hand them to our own constructor unchanged.
    return cls(atoms=atoms.get_chemical_symbols(), coords=atoms.positions)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _convert_eq(self, eq): """WORKS INPLACE on eq """
rename = dict(enumerate(self.index)) eq['eq_sets'] = {rename[k]: {rename[x] for x in v} for k, v in eq['eq_sets'].items()} eq['sym_ops'] = {rename[k]: {rename[x]: v[x] for x in v} for k, v in eq['sym_ops'].items()} try: sym_mol = self.from_pymatgen_molecule(eq['sym_mol']) sym_mol.index = self.index eq['sym_mol'] = sym_mol._to_numeric() except KeyError: pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_pointgroup(self, tolerance=0.3): """Returns a PointGroup object for the molecule. Args: tolerance (float): Tolerance to generate the full set of symmetry operations. Returns: :class:`~PointGroupOperations` """
    # delegate symmetry detection to pymatgen's PointGroupAnalyzer, then wrap
    # its Schoenflies symbol and symmetry operations in our own type
    PA = self._get_point_group_analyzer(tolerance=tolerance)
    return PointGroupOperations(PA.sch_symbol, PA.symmops)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_equivalent_atoms(self, tolerance=0.3): """Returns sets of equivalent atoms with symmetry operations Args: tolerance (float): Tolerance to generate the full set of symmetry operations. Returns: dict: The returned dictionary has two possible keys: ``eq_sets``: A dictionary of indices mapping to sets of indices, each key maps to indices of all equivalent atoms. The keys are guaranteed to be not equivalent. ``sym_ops``: Twofold nested dictionary. ``operations[i][j]`` gives the symmetry operation that maps atom ``i`` unto ``j``. """
    PA = self._get_point_group_analyzer(tolerance=tolerance)
    eq = PA.get_equivalent_atoms()
    # pymatgen returns positional indices; remap them (in place) to this
    # molecule's own index labels
    self._convert_eq(eq)
    return eq
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_serialize(_txobj): """ Given a transaction dict returned by btc_tx_deserialize, convert it back into a hex-encoded byte string. Derived from code written by Vitalik Buterin in pybitcointools (https://github.com/vbuterin/pybitcointools) """
# output buffer o = [] txobj = None if encoding.json_is_base(_txobj, 16): # txobj is built from hex strings already. deserialize them txobj = encoding.json_changebase(_txobj, lambda x: binascii.unhexlify(x)) else: txobj = copy.deepcopy(_txobj) # version o.append(encoding.encode(txobj["version"], 256, 4)[::-1]) # do we have any witness scripts? have_witness = False for inp in txobj['ins']: if inp.has_key('witness_script') and len(inp['witness_script']) > 0: have_witness = True break if have_witness: # add segwit marker o.append('\x00\x01') # number of inputs o.append(encoding.num_to_var_int(len(txobj["ins"]))) # all inputs for inp in txobj["ins"]: # input tx hash o.append(inp["outpoint"]["hash"][::-1]) # input tx outpoint o.append(encoding.encode(inp["outpoint"]["index"], 256, 4)[::-1]) # input scriptsig script = inp.get('script') if not script: script = bytes() scriptsig = encoding.num_to_var_int(len(script)) + script o.append(scriptsig) # sequence o.append(encoding.encode(inp.get("sequence", UINT_MAX - 1), 256, 4)[::-1]) # number of outputs o.append(encoding.num_to_var_int(len(txobj["outs"]))) # all outputs for out in txobj["outs"]: # value o.append(encoding.encode(out["value"], 256, 8)[::-1]) # scriptPubKey scriptpubkey = encoding.num_to_var_int(len(out['script'])) + out['script'] o.append(scriptpubkey) # add witnesses if have_witness: for inp in txobj['ins']: witness_script = inp.get('witness_script') if not witness_script: witness_script = '\x00' o.append(witness_script) # locktime o.append(encoding.encode(txobj["locktime"], 256, 4)[::-1]) # full string ret = ''.join( encoding.json_changebase(o, lambda x: encoding.safe_hexlify(x)) ) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_witness_strip( tx_serialized ): """ Strip the witness information from a serialized transaction """
if not btc_tx_is_segwit(tx_serialized): # already strippped return tx_serialized tx = btc_tx_deserialize(tx_serialized) for inp in tx['ins']: del inp['witness_script'] tx_stripped = btc_tx_serialize(tx) return tx_stripped
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_script_to_asm( script_hex ): """ Decode a script into assembler """
if len(script_hex) == 0: return "" try: script_array = btc_script_deserialize(script_hex) except: log.error("Failed to convert '%s' to assembler" % script_hex) raise script_tokens = [] for token in script_array: if token is None: token = 0 token_name = None if type(token) in [int,long]: token_name = OPCODE_NAMES.get(token, None) if token_name is None: token_name = str(token) else: token_name = token script_tokens.append(token_name) return " ".join(script_tokens)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_extend(partial_tx_hex, new_inputs, new_outputs, **blockchain_opts): """ Given an unsigned serialized transaction, add more inputs and outputs to it. @new_inputs and @new_outputs will be virtualchain-formatted: """
# recover tx tx = btc_tx_deserialize(partial_tx_hex) tx_inputs, tx_outputs = tx['ins'], tx['outs'] locktime, version = tx['locktime'], tx['version'] tx_inputs += new_inputs tx_outputs += new_outputs new_tx = { 'ins': tx_inputs, 'outs': tx_outputs, 'locktime': locktime, 'version': version, } new_unsigned_tx = btc_tx_serialize(new_tx) return new_unsigned_tx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_der_encode_length(l): """ Return a DER-encoded length field Based on code from python-ecdsa (https://github.com/warner/python-ecdsa) by Brian Warner. Subject to the MIT license. """
if l < 0: raise ValueError("length cannot be negative") if l < 0x80: return int2byte(l) s = ("%x" % l).encode() if len(s) % 2: s = b("0") + s s = binascii.unhexlify(s) llen = len(s) return int2byte(0x80 | llen) + s
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_der_encode_sequence(*encoded_pieces): """ Return a DER-encoded sequence Based on code from python-ecdsa (https://github.com/warner/python-ecdsa) by Brian Warner. Subject to the MIT license. """
# borrowed from python-ecdsa total_len = sum([len(p) for p in encoded_pieces]) return b('\x30') + btc_tx_der_encode_length(total_len) + b('').join(encoded_pieces)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_sighash( tx, idx, script, hashcode=SIGHASH_ALL): """ Calculate the sighash of a non-segwit transaction. If it's SIGHASH_NONE, then digest the inputs but no outputs If it's SIGHASH_SINGLE, then digest all inputs and all outputs up to i (excluding values and scripts), and fully digest the ith input and output If it's (something) | SIGHASH_ANYONECANPAY, then only digest the ith input. Return the double-sha256 digest of the relevant fields. THIS DOES NOT WORK WITH SEGWIT OUTPUTS Adapted from https://github.com/vbuterin/pybitcointools, by Vitalik Buterin """
txobj = btc_tx_deserialize(tx) idx = int(idx) hashcode = int(hashcode) newtx = copy.deepcopy(txobj) # remove all scriptsigs in all inputs, except for the ith input's scriptsig. # the other inputs will be 'partially signed', except for SIGHASH_ANYONECANPAY mode. for i in xrange(0, len(newtx['ins'])): newtx['ins'][i]["script"] = '' if i == idx: if newtx['ins'][i].has_key('witness_script') and newtx['ins'][i]['witness_script']: raise ValueError('this method does not handle segwit inputs') if newtx['ins'][i].has_key('witness_script'): del newtx['ins'][i]['witness_script'] newtx["ins"][idx]["script"] = script if (hashcode & 0x1f) == SIGHASH_NONE: # don't care about the outputs with this signature newtx["outs"] = [] for inp in newtx['ins']: inp['sequence'] = 0 elif (hashcode & 0x1f) == SIGHASH_SINGLE: # only signing for this input. # all outputs after this input will not be signed. # all outputs before this input will be partially signed (but not their values or scripts) if len(newtx['ins']) > len(newtx['outs']): raise ValueError('invalid hash code: {} inputs but {} outputs'.format(len(newtx['ins']), len(newtx['outs']))) newtx["outs"] = newtx["outs"][:len(newtx["ins"])] for out in newtx["outs"][:len(newtx["ins"]) - 1]: out['value'] = 2**64 - 1 out['script'] = "" elif (hashcode & SIGHASH_ANYONECANPAY) != 0: # only going to sign this specific input, and nothing else newtx["ins"] = [newtx["ins"][idx]] signing_tx = btc_tx_serialize(newtx) sighash = btc_tx_get_hash( signing_tx, hashcode ) return sighash
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_sign_multisig_segwit(tx, idx, prevout_amount, witness_script, private_keys, hashcode=SIGHASH_ALL, hashcodes=None, native=False): """ Sign a native p2wsh or p2sh-p2wsh multisig input. @tx must be a hex-encoded tx Return the signed transaction """
def btc_tx_sign_multisig_segwit(tx, idx, prevout_amount, witness_script, private_keys, hashcode=SIGHASH_ALL, hashcodes=None, native=False):
    """
    Sign a native p2wsh or p2sh-p2wsh multisig input.

    @tx: hex-encoded transaction
    @idx: index of the input to sign
    @prevout_amount: value (satoshis) of the output being consumed
    @witness_script: the multisig witness script
    @private_keys: list of hex private keys; must cover at least m of the
        script's public keys
    @hashcodes: optional per-signature hash types (one per private key used);
        defaults to @hashcode for all signatures

    Return the signed transaction (hex).
    Raise ValueError if not enough keys are supplied, or a key is reused.
    """
    from .multisig import parse_multisig_redeemscript

    if hashcodes is None:
        hashcodes = [hashcode] * len(private_keys)

    txobj = btc_tx_deserialize(str(tx))

    # map compressed public keys to their private keys
    privs = {}
    for pk in private_keys:
        pubk = ecdsalib.ecdsa_private_key(pk).public_key().to_hex()
        compressed_pubkey = keylib.key_formatting.compress(pubk)
        privs[compressed_pubkey] = pk

    m, public_keys = parse_multisig_redeemscript(witness_script)

    used_keys, sigs = [], []
    for public_key in public_keys:
        if public_key not in privs:
            continue

        if len(used_keys) == m:
            break

        if public_key in used_keys:
            raise ValueError('Tried to reuse key in witness script: {}'.format(public_key))

        pk_str = privs[public_key]

        # BUG FIX: hashcodes has one entry per *private key / signature*.
        # The old code indexed it by the public key's position in the witness
        # script, which picks the wrong entry (or raises IndexError) whenever
        # the script contains more public keys than we hold private keys for.
        sig_hashcode = hashcodes[len(used_keys)]

        used_keys.append(public_key)
        sig = btc_tx_make_input_signature_segwit(tx, idx, prevout_amount, witness_script, pk_str, sig_hashcode)
        sigs.append(sig)

    if len(used_keys) != m:
        raise ValueError('Missing private keys (used {}, required {})'.format(len(used_keys), m))

    if native:
        # native p2wsh: witness is [<dummy> <sigs...> <witness script>]
        txobj['ins'][idx]['witness_script'] = btc_witness_script_serialize([None] + sigs + [witness_script])
    else:
        # p2sh-p2wsh: scriptSig pushes the p2wsh redeem script,
        # witness carries the signatures and the witness script
        redeem_script = btc_make_p2sh_p2wsh_redeem_script(witness_script)
        txobj['ins'][idx]['script'] = redeem_script
        txobj['ins'][idx]['witness_script'] = btc_witness_script_serialize([None] + sigs + [witness_script])

    return btc_tx_serialize(txobj)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_privkey_scriptsig_classify(private_key_info): """ What kind of scriptsig can this private key make? """
def btc_privkey_scriptsig_classify(private_key_info):
    """
    Determine which kind of scriptsig this private key bundle can make.

    Return one of 'p2pkh', 'p2sh', 'p2sh-p2wpkh', 'p2sh-p2wsh',
    or None if the key bundle is not recognized.
    """
    # ordered checks: first matching classifier wins
    classifiers = [
        (btc_is_singlesig, 'p2pkh'),
        (btc_is_multisig, 'p2sh'),
        (btc_is_singlesig_segwit, 'p2sh-p2wpkh'),
        (btc_is_multisig_segwit, 'p2sh-p2wsh'),
    ]

    for predicate, scriptsig_type in classifiers:
        if predicate(private_key_info):
            return scriptsig_type

    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def btc_tx_sign_all_unsigned_inputs(private_key_info, prev_outputs, unsigned_tx_hex, scriptsig_type=None, segwit=None, **blockchain_opts): """ Sign all unsigned inputs with a given key. Use the given outputs to fund them. @private_key_info: either a hex private key, or a dict with 'private_keys' and 'redeem_script' defined as keys. @prev_outputs: a list of {'out_script': xxx, 'value': xxx} that are in 1-to-1 correspondence with the unsigned inputs in the tx ('value' is in satoshis) @unsigned_tx_hex: hex transaction with unsigned inputs Returns: signed hex transaction """
def btc_tx_sign_all_unsigned_inputs(private_key_info, prev_outputs, unsigned_tx_hex, scriptsig_type=None, segwit=None, **blockchain_opts):
    """
    Sign all unsigned inputs with a given key.  Use the given outputs to fund them.

    @private_key_info: either a hex private key, or a dict with 'private_keys'
        and 'redeem_script' defined as keys.
    @prev_outputs: a list of {'out_script': xxx, 'value': xxx} in 1-to-1
        correspondence with the unsigned inputs ('value' is in satoshis)
    @unsigned_tx_hex: hex transaction with unsigned inputs

    Return the signed hex transaction.
    Raise ValueError if there are fewer prev_outputs than unsigned inputs.
    """
    if segwit is None:
        segwit = get_features('segwit')

    parsed_tx = btc_tx_deserialize(unsigned_tx_hex)
    tx_inputs = parsed_tx['ins']

    if scriptsig_type is None:
        scriptsig_type = btc_privkey_scriptsig_classify(private_key_info)

    tx_hex = unsigned_tx_hex
    next_prevout = 0

    for input_index, tx_input in enumerate(tx_inputs):
        use_witness = segwit
        if 'witness_script' in tx_input:
            use_witness = True
        elif segwit:
            # all inputs must receive a witness script, even if it's empty
            tx_input['witness_script'] = ''

        # skip inputs that already carry a scriptsig or a witness script
        has_scriptsig = tx_input['script'] and len(tx_input['script']) > 0
        has_witness = 'witness_script' in tx_input and len(tx_input['witness_script']) > 0
        if has_scriptsig or has_witness:
            continue

        if next_prevout >= len(prev_outputs):
            raise ValueError("Not enough prev_outputs ({} given, {} more prev-outputs needed)".format(len(prev_outputs), len(tx_inputs) - next_prevout))

        # tx with this input signed with privkey
        tx_hex = btc_tx_sign_input(
            str(unsigned_tx_hex), input_index,
            prev_outputs[next_prevout]['out_script'], prev_outputs[next_prevout]['value'],
            private_key_info, segwit=use_witness, scriptsig_type=scriptsig_type)

        unsigned_tx_hex = tx_hex
        next_prevout += 1

    return tx_hex
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def block_header_serialize( inp ): """ Given block header information, serialize it and return the hex hash. inp has: * version (int) * prevhash (str) * merkle_root (str) * timestamp (int) * bits (int) * nonce (int) Based on code from pybitcointools (https://github.com/vbuterin/pybitcointools) by Vitalik Buterin """
def block_header_serialize(inp):
    """
    Given block header information, serialize it and return the hex form.

    inp has:
    * version (int)
    * prevhash (str)
    * merkle_root (str)
    * timestamp (int)
    * bits (int)
    * nonce (int)
    * hash (str) -- used to verify the serialization

    Based on code from pybitcointools (https://github.com/vbuterin/pybitcointools)
    by Vitalik Buterin
    """
    # little-endian 4-byte integer encoding
    def le32(n):
        return encoding.encode(n, 256, 4)[::-1]

    # concatenate the header fields (hashes are stored byte-reversed)
    header_fields = [
        le32(inp['version']),
        inp['prevhash'].decode('hex')[::-1],
        inp['merkle_root'].decode('hex')[::-1],
        le32(inp['timestamp']),
        le32(inp['bits']),
        le32(inp['nonce']),
    ]
    serialized = ''.join(header_fields)

    # sanity check: the double-sha256 of the header (reversed) must equal the
    # block hash we were given
    computed_hash = hashing.bin_sha256(hashing.bin_sha256(serialized))[::-1].encode('hex')
    assert computed_hash == inp['hash'], (hashing.bin_sha256(serialized).encode('hex'), inp['hash'])

    return serialized.encode('hex')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def block_header_to_hex( block_data, prev_hash ): """ Calculate the hex form of a block's header, given its getblock information from bitcoind. """
def block_header_to_hex(block_data, prev_hash):
    """
    Calculate the hex form of a block's header, given its getblock
    information from bitcoind and the previous block's hash.
    """
    # repackage bitcoind's getblock fields into the shape
    # block_header_serialize() expects
    return block_header_serialize({
        "version": block_data['version'],
        "prevhash": prev_hash,
        "merkle_root": block_data['merkleroot'],
        "timestamp": block_data['time'],
        "bits": int(block_data['bits'], 16),  # bitcoind reports bits as hex
        "nonce": block_data['nonce'],
        "hash": block_data['hash'],
    })
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def block_header_verify( block_data, prev_hash, block_hash ): """ Verify whether or not bitcoind's block header matches the hash we expect. """
def block_header_verify(block_data, prev_hash, block_hash):
    """
    Verify whether or not bitcoind's block header matches the hash we expect.
    Returns True iff the double-sha256 of the serialized header, byte-reversed
    and hex-encoded, equals @block_hash.
    """
    header_hex = block_header_to_hex(block_data, prev_hash)
    header_digest = hashing.bin_double_sha256(binascii.unhexlify(header_hex))

    # block hashes are presented byte-reversed relative to the digest
    computed_hash = binascii.hexlify(header_digest[::-1])
    return block_hash == computed_hash
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, force=False): """ Saves the configuration to a JSON, in the standard config location. Args: force (Optional[:obj:`bool`]): Continue writing, even if the original config file was not loaded properly. This is dangerous, because it could cause the previous configuration options to be lost. Defaults to :obj:`False`. Raises: :obj:`ConfigError`: if the configuration file was not successfully loaded on initialization of the class, and :obj:`force` is :obj:`False`. """
def save(self, force=False):
    """
    Saves the configuration to a JSON, in the standard config location.

    Args:
        force (Optional[:obj:`bool`]): Continue writing, even if the original
            config file was not loaded properly. This is dangerous, because
            it could cause the previous configuration options to be lost.
            Defaults to :obj:`False`.

    Raises:
        :obj:`ConfigError`: if the configuration file was not successfully
            loaded on initialization of the class, and :obj:`force` is
            :obj:`False`.
    """
    # refuse to clobber a config file that failed to load, unless forced
    if (not self._success) and (not force):
        raise ConfigError((
            'The config file appears to be corrupted:\n\n'
            ' {fname}\n\n'
            'Before attempting to save the configuration, please either '
            'fix the config file manually, or overwrite it with a blank '
            'configuration as follows:\n\n'
            ' from dustmaps.config import config\n'
            ' config.reset()\n\n'
        ).format(fname=self.fname))

    with open(self.fname, 'w') as config_file:
        json.dump(self._options, config_file, indent=2)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reset(self): """ Resets the configuration, and overwrites the existing configuration file. """
def reset(self):
    """
    Resets the configuration, and overwrites the existing configuration file.
    """
    # wipe the in-memory options, then force a write so the on-disk file is
    # replaced even if the previous one failed to load
    self._options = {}
    self.save(force=True)
    self._success = True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run( self ): """ Interact with the blockchain peer, until we get a socket error or we exit the loop explicitly. The order of operations is: * send version * receive version * send verack * send getdata * receive blocks * for each block: * for each transaction with nulldata: * for each input: * get the transaction that produced the consumed input Return True on success Return False on error """
def run(self):
    """
    Interact with the blockchain peer, until we get a socket error or we
    exit the loop explicitly.

    The order of operations is:
    * version/verack handshake
    * getdata for blocks
    * receive blocks, and for each nulldata transaction input, fetch the
      transaction that produced it

    Return True on success.
    Return False on error.
    """
    log.debug("Segwit support: {}".format(get_features('segwit')))
    self.begin()

    try:
        self.loop()
    except socket.error as se:
        # only a problem if we weren't done yet
        if not self.finished:
            log.exception(se)
            return False

    # pull in the transactions that funded our nulldata txs
    try:
        self.fetch_sender_txs()
    except Exception as e:
        log.exception(e)
        return False

    # verify everything we collected
    try:
        self.block_data_sanity_checks()
    except AssertionError as ae:
        log.exception(ae)
        return False

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def have_all_block_data(self): """ Have we received all block data? """
def have_all_block_data(self):
    """
    Have we received every block we requested?
    Return True iff the received count matches the requested count.
    """
    if self.num_blocks_received == self.num_blocks_requested:
        return True

    log.debug("num blocks received = %s, num requested = %s" % (self.num_blocks_received, self.num_blocks_requested))
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_sender_txs(self): """ Fetch all sender txs via JSON-RPC, and merge them into our block data. Try backing off (up to 5 times) if we fail to fetch transactions via JSONRPC Return True on success Raise on error """
def fetch_sender_txs(self):
    """
    Fetch all sender txs via JSON-RPC, and merge them into our block data.
    Each batch is retried up to 5 times with a linearly-increasing delay.

    Return True on success.
    Raise Exception if a batch cannot be fetched after all retries.
    """
    if len(self.sender_info.keys()) > 0:
        all_txids = self.sender_info.keys()[:]
        batch_size = 20

        # split the txids into fixed-size batches
        txid_batches = [all_txids[offset:offset + batch_size] for offset in xrange(0, len(all_txids), batch_size)]

        for batch_num in xrange(0, len(txid_batches)):
            batch = txid_batches[batch_num]
            log.debug("Fetch %s TXs via JSON-RPC (%s-%s of %s)" % (len(batch), batch_num * batch_size, batch_num * batch_size + len(batch), len(all_txids)))

            fetched_txs = None
            for attempt in xrange(0, 5):
                fetched_txs = self.fetch_txs_rpc(self.bitcoind_opts, batch)
                if fetched_txs is None:
                    log.error("Failed to fetch transactions; trying again (%s of %s)" % (attempt + 1, 5))
                    time.sleep(attempt + 1)
                    continue

                break

            if fetched_txs is None:
                raise Exception("Failed to fetch transactions")

            # pair each sender tx output with the nulldata tx input it funded
            for sender_txid, sender_tx in fetched_txs.items():
                assert sender_txid in self.sender_info.keys(), "Unsolicited sender tx %s" % sender_txid

                for vout_index in self.sender_info[sender_txid].keys():
                    if sender_txid != "0000000000000000000000000000000000000000000000000000000000000000":
                        # regular tx, not coinbase
                        assert vout_index < len(sender_tx['outs']), 'Output index {} is out of bounds for {}'.format(vout_index, sender_txid)
                        self.add_sender_info(sender_txid, vout_index, sender_tx['outs'][vout_index])
                    else:
                        # coinbase
                        self.add_sender_info(sender_txid, vout_index, sender_tx['outs'][0])

                # update accounting
                self.num_txs_received += 1

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def block_data_sanity_checks(self): """ Verify that the data we received makes sense. Return True on success Raise on error """
def block_data_sanity_checks(self):
    """
    Verify that the data we received makes sense: all blocks arrived, every
    sender tx was matched, and each input lines up with its recorded sender.

    Return True on success.
    Raise AssertionError on inconsistency.
    """
    assert self.have_all_block_data(), "Still missing block data"
    assert self.num_txs_received == len(self.sender_info.keys()), "Num TXs received: %s; num TXs requested: %s" % (self.num_txs_received, len(self.sender_info.keys()))

    for (block_hash, block_info) in self.block_info.items():
        for tx in block_info['txns']:
            assert None not in tx['senders'], "Missing one or more senders in %s; dump follows\n%s" % (tx['txid'], simplejson.dumps(tx, indent=4, sort_keys=True))
            for i in range(0, len(tx['ins'])):
                inp = tx['ins'][i]
                sinfo = tx['senders'][i]

                assert sinfo['txid'] in self.sender_info, 'Surreptitious sender tx {}'.format(sinfo['txid'])
                assert inp['outpoint']['index'] == sinfo['nulldata_vin_outpoint'], 'Mismatched sender/input index ({}: {} != {}); dump follows\n{}'.format(
                        sinfo['txid'], inp['outpoint']['index'], sinfo['nulldata_vin_outpoint'], simplejson.dumps(tx, indent=4, sort_keys=True))

                # BUG FIX: inputs have no 'txid' key (the spent tx is at
                # inp['outpoint']['hash']), so the old error message raised a
                # KeyError instead of producing a useful assertion failure.
                assert inp['outpoint']['hash'] == sinfo['txid'], 'Mismatched sender/input txid ({} != {}); dump follows\n{}'.format(inp['outpoint']['hash'], sinfo['txid'], simplejson.dumps(tx, indent=4, sort_keys=True))

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def begin(self): """ This method will implement the handshake of the Bitcoin protocol. It will send the Version message, and block until it receives a VerAck. Once we receive the version, we'll send the verack, and begin downloading. """
def begin(self):
    """
    Begin the Bitcoin protocol handshake by sending our Version message.
    The peer's Version reply and the subsequent VerAck/getdata exchange are
    handled by the message loop.
    """
    log.debug("handshake (version %s)" % PROTOCOL_VERSION)

    # advertise no services -- we can't send blocks
    version = Version()
    version.services = 0

    log.debug("send Version")
    self.send_message(version)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_sender_info( self, sender_txhash, nulldata_vin_outpoint, sender_out_data ): """ Record sender information in our block info. @sender_txhash: txid of the sender @nulldata_vin_outpoint: the 'vout' index from the nulldata tx input that this transaction funded """
def add_sender_info(self, sender_txhash, nulldata_vin_outpoint, sender_out_data):
    """
    Record sender information in our block info.

    @sender_txhash: txid of the sender
    @nulldata_vin_outpoint: the 'vout' index from the nulldata tx input
        that this transaction funded
    @sender_out_data: the sender tx's output ({'value': ..., 'script': ...})

    Return True
    """
    assert sender_txhash in self.sender_info.keys(), "Missing sender info for %s" % sender_txhash
    assert nulldata_vin_outpoint in self.sender_info[sender_txhash], "Missing outpoint %s for sender %s" % (nulldata_vin_outpoint, sender_txhash)

    # where in our block data does this sender's info belong?
    bookmark = self.sender_info[sender_txhash][nulldata_vin_outpoint]
    block_hash = bookmark['block_hash']
    relindex = bookmark['relindex']
    relinput_index = bookmark['relinput']

    value_in_satoshis = sender_out_data['value']
    script_pubkey = sender_out_data['script']
    script_info = bits.btc_tx_output_parse_script(script_pubkey)

    sender_info = {
        "value": value_in_satoshis,
        "script_pubkey": script_pubkey,
        "script_type": script_info['type'],
        "addresses": script_info.get('addresses', []),
        "nulldata_vin_outpoint": nulldata_vin_outpoint,
        "txid": sender_txhash,
    }

    # debit this tx's total value
    self.block_info[block_hash]['txns'][relindex]['fee'] += value_in_satoshis

    # remember this sender, but put it in the right place.
    # senders[i] must correspond to tx['vin'][i]
    self.block_info[block_hash]['txns'][relindex]['senders'][relinput_index] = sender_info
    self.block_info[block_hash]['num_senders'] += 1

    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_sender_info( self, block_hash, txn, i, block_height ): """ Make sender information bundle for a particular input of a nulldata transaction. We'll use it to go find the transaction output that funded the ith input of the given tx. """
def make_sender_info(self, block_hash, txn, i, block_height):
    """
    Make sender information bundle for a particular input of a nulldata
    transaction.  We'll use it to go find the transaction output that
    funded the ith input of the given tx.
    """
    spent_input = txn['ins'][i]
    return {
        # to be filled in once the sender tx is fetched...
        'scriptPubKey': None,
        'addresses': None,

        # for matching the input with the sender output that funded it
        "txindex": txn['txindex'],
        "relindex": txn['relindex'],
        "output_index": spent_input['outpoint']['index'],
        "block_hash": block_hash,
        "relinput": i,
        "block_height": block_height,
    }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_txs_rpc( self, bitcoind_opts, txids ): """ Fetch the given list of transactions via the JSON-RPC interface. Return a dict of parsed transactions on success, keyed by txid. Return None on error """
def fetch_txs_rpc(self, bitcoind_opts, txids):
    """
    Fetch the given list of transactions via the JSON-RPC interface.

    Return a dict of parsed transactions on success, keyed by txid.
    Return None on error
    """
    headers = {'content-type': 'application/json'}
    parsed_txs = {}
    rpc_requests = []

    for req_id in xrange(0, len(txids)):
        txid = txids[req_id]

        if txid == "0000000000000000000000000000000000000000000000000000000000000000":
            # coinbase; we never send these
            parsed_txs[txid] = {
                'version': 1,
                'locktime': 0,
                'ins': [],
                'outs': [
                    {
                        'script': '',
                        'value': 0  # not really 0, but we don't care about coinbases anyway
                    }
                ],
            }
            continue

        rpc_requests.append({'method': 'getrawtransaction', 'params': [txid, 0], 'jsonrpc': '2.0', 'id': req_id})

    proto = "http"
    if bitcoind_opts.get('bitcoind_use_https'):
        proto = "https"

    server_url = "%s://%s:%s@%s:%s" % (proto, bitcoind_opts['bitcoind_user'], bitcoind_opts['bitcoind_passwd'], bitcoind_opts['bitcoind_server'], bitcoind_opts['bitcoind_port'])

    try:
        resp = requests.post(server_url, headers=headers, data=simplejson.dumps(rpc_requests), verify=False)
    except Exception as e:
        log.exception(e)
        log.error("Failed to fetch %s transactions" % len(txids))
        return None

    # get responses
    try:
        resp_json = resp.json()
        assert type(resp_json) in [list]
    except Exception as e:
        log.exception(e)
        log.error("Failed to parse transactions")
        return None

    try:
        for rpc_resp in resp_json:
            assert 'result' in rpc_resp, "Missing result"

            txhex = rpc_resp['result']
            assert txhex is not None, "Invalid RPC response '%s' (for %s)" % (simplejson.dumps(rpc_resp), txids[rpc_resp['id']])

            if bits.btc_tx_is_segwit(txhex) and not get_features('segwit'):
                # no segwit support yet
                log.error("FATAL: SegWit transaction detected! Support for SegWit-formatted transactions is not yet activated")
                log.error("Please ensure your bitcoind node has `rpcserialversion=0` set.")
                log.error("Aborting...")
                os.abort()

            # derive the txid: double-sha256 of the raw tx, byte-reversed
            try:
                tx_bin = txhex.decode('hex')
                assert tx_bin is not None

                tx_hash_bin = hashing.bin_double_sha256(tx_bin)[::-1]
                assert tx_hash_bin is not None

                tx_hash = tx_hash_bin.encode('hex')
                assert tx_hash is not None
            except Exception as e:
                log.error("Failed to calculate txid of %s" % txhex)
                raise

            # solicited transaction?
            assert tx_hash in txids, "Unsolicited transaction %s" % tx_hash

            # unique?
            if tx_hash in parsed_txs.keys():
                continue

            # parse from hex string
            txn_serializer = TxSerializer()
            txn = txn_serializer.deserialize(StringIO(binascii.unhexlify(txhex)))

            parsed_txs[tx_hash] = self.parse_tx(txn, {}, "", -1)

    except Exception as e:
        log.exception(e)
        log.error("Failed to receive transactions")
        return None

    return parsed_txs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def read_config(config_file=CONFIG_FILE_DEFAULT, override_url=None):
    '''
    Read configuration file, perform sanity check and return configuration
    dictionary used by other functions.

    @config_file: path to the INI configuration file
    @override_url: if given, replaces the configured server URL
        (only used by test suite runs)
    '''
    config = ConfigParser()
    config.read_dict(DEFAULT_SETTINGS)
    try:
        # read_file() replaces the deprecated readfp(); the with-block closes
        # the file handle, which the old code leaked
        with open(config_file) as f:
            config.read_file(f)
        logger.debug("Using config file at " + config_file)
    except (IOError, OSError):
        # BUG FIX: a bare `except:` also swallowed parse errors and even
        # KeyboardInterrupt; only a missing/unreadable file should silently
        # fall back to the defaults
        logger.error(
            "Could not find {0}, running with defaults.".format(config_file))
    if not logger.handlers:
        # Before doing anything else, configure logging
        # Handlers might be already registered in repeated test suite runs
        # In production, this should never happen
        if config.getboolean("Logging", "to_file"):
            handler = logging.FileHandler(config.get("Logging", "file"))
        else:
            handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            config.get("Logging", "format")))
        logger.addHandler(handler)
        logger.setLevel(config.get("Logging", "level"))
    if override_url:
        config['Server']['url'] = override_url
    return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def check_config(config):
    '''
    Check the executor config file for consistency.

    @config: a ConfigParser with 'Server' and 'Execution' sections
    Return True if the configuration is usable, False otherwise.
    '''
    # Check server URL
    url = config.get("Server", "url")
    try:
        urlopen(url)
    except Exception as e:
        logger.error(
            "The configured OpenSubmit server URL ({0}) seems to be invalid: {1}".format(url, e))
        return False
    # Check directory specification
    targetdir = config.get("Execution", "directory")
    # BUG FIX: `is not "Windows"` compares object *identity*, which only works
    # for interned strings by accident -- use != for string comparison
    if platform.system() != "Windows" and not targetdir.startswith("/"):
        logger.error(
            "Please use absolute paths, starting with a /, in your Execution-directory setting.")
        return False
    if not targetdir.endswith(os.sep):
        logger.error(
            "Your Execution-directory setting must end with a " + os.sep)
        return False
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def has_config(config_fname):
    '''
    Determine if the given config file exists and is readable.

    @config_fname: path to the INI configuration file
    Return True if it could be opened and parsed, False if it is missing.
    '''
    config = RawConfigParser()
    try:
        # read_file() replaces the deprecated readfp(); the with-block closes
        # the file handle, which the old code leaked
        with open(config_fname) as f:
            config.read_file(f)
        return True
    except IOError:
        return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def create_config(config_fname, override_url=None):
    '''
    Create the config file from the defaults under the given name.

    @config_fname: target path for the new config file
    @override_url: optional server URL replacing the default
        (only used by test suite runs)
    Return True.
    '''
    # make sure the target directory exists
    os.makedirs(os.path.dirname(config_fname), exist_ok=True)

    # Consider override URL. Only used by test suite runs
    settings = DEFAULT_SETTINGS_FLAT
    if override_url:
        settings['url'] = override_url

    # Create fresh config file, including new UUID
    with open(config_fname, 'wt') as config:
        config.write(DEFAULT_FILE_CONTENT.format(**settings))
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def make_student(user):
    ''' Makes the given user a student. '''
    tutors, owners = _get_user_groups()

    # students get no admin-interface access at all
    user.is_staff = False
    user.is_superuser = False
    user.save()

    # drop both privilege group memberships
    owners.user_set.remove(user)
    owners.save()
    tutors.user_set.remove(user)
    tutors.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def make_tutor(user):
    ''' Makes the given user a tutor. '''
    tutors, owners = _get_user_groups()

    # tutors may enter the admin interface, but are not superusers
    user.is_staff = True
    user.is_superuser = False
    user.save()

    # member of the tutor group only
    owners.user_set.remove(user)
    owners.save()
    tutors.user_set.add(user)
    tutors.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def make_owner(user):
    ''' Makes the given user an owner and tutor. '''
    tutors, owners = _get_user_groups()

    # owners may enter the admin interface, but are not superusers
    user.is_staff = True
    user.is_superuser = False
    user.save()

    # member of both privilege groups
    owners.user_set.add(user)
    owners.save()
    tutors.user_set.add(user)
    tutors.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def make_admin(user):
    ''' Makes the given user an admin. '''
    tutors, owners = _get_user_groups()

    # admins are full superusers with admin-interface access
    user.is_staff = True
    user.is_superuser = True
    user.save()

    # member of both privilege groups as well
    owners.user_set.add(user)
    owners.save()
    tutors.user_set.add(user)
    tutors.save()