Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Given the code snippet: <|code_start|># (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
obj1 = load_fixture('test1.macho')
obj2 = load_fixture('test2.macho')
def test_obj_identification(obj1):
assert isinstance(obj1, MachoFile)
def test_obj_no_differences(obj1):
difference = obj1.compare(obj1)
assert difference is None
@pytest.fixture
def obj_differences(obj1, obj2):
return obj1.compare(obj2).details
@skip_unless_tools_exist('otool', 'lipo')
def test_obj_compare_non_existing(monkeypatch, obj1):
<|code_end|>
, generate the next line using the imports in this file:
import pytest
import os.path
from diffoscope.config import Config
from diffoscope.comparators.macho import MachoFile
from diffoscope.comparators.missing_file import MissingFile
from utils.data import load_fixture, get_data
from utils.tools import skip_unless_tools_exist
and context (functions, classes, or occasionally code) from other files:
# Path: diffoscope/config.py
# class Config(object):
# max_diff_block_lines = 256
# max_diff_block_lines_parent = 50
# max_diff_block_lines_saved = float("inf")
# # html-dir output uses ratio * max-diff-block-lines as its limit
# max_diff_block_lines_html_dir_ratio = 4
# # GNU diff cannot process arbitrary large files :(
# max_diff_input_lines = 2 ** 20
# max_report_size = 2000 * 2 ** 10 # 2000 kB
# max_text_report_size = 0
# max_report_child_size = 500 * 2 ** 10
# new_file = False
# fuzzy_threshold = 60
# enforce_constraints = True
# excludes = ()
#
# _singleton = {}
#
# def __init__(self):
# self.__dict__ = self._singleton
#
# def __setattr__(self, k, v):
# super(Config, self).__setattr__(k, v)
#
# if self.enforce_constraints:
# self.check_constraints()
#
# def check_constraints(self):
# if self.max_diff_block_lines < self.max_diff_block_lines_parent: # noqa
# raise ValueError("max_diff_block_lines ({0.max_diff_block_lines}) "
# "cannot be smaller than max_diff_block_lines_parent "
# "({0.max_diff_block_lines_parent})".format(self),
# )
#
# max_ = self.max_diff_block_lines_html_dir_ratio * \
# self.max_diff_block_lines
# if self.max_diff_block_lines_saved < max_: # noqa
# raise ValueError("max_diff_block_lines_saved "
# "({0.max_diff_block_lines_saved}) cannot be smaller than "
# "{0.max_diff_block_lines_html_dir_ratio} * "
# "max_diff_block_lines ({1})".format(self, max_),
# )
. Output only the next line. | monkeypatch.setattr(Config(), 'new_file', True) |
Using the snippet: <|code_start|>mo1 = load_fixture('test1.mo')
mo2 = load_fixture('test2.mo')
def test_identification(mo1):
assert isinstance(mo1, MoFile)
def test_no_differences(mo1):
difference = mo1.compare(mo1)
assert difference is None
@pytest.fixture
def differences(mo1, mo2):
return mo1.compare(mo2).details
@skip_unless_tools_exist('msgunfmt')
def test_diff(differences):
expected_diff = get_data('mo_expected_diff')
assert differences[0].unified_diff == expected_diff
mo_no_charset = load_fixture('test_no_charset.mo')
mo_iso8859_1 = load_fixture('test_iso8859-1.mo')
@skip_unless_tools_exist('msgunfmt')
def test_charsets(mo_no_charset, mo_iso8859_1):
difference = mo_no_charset.compare(mo_iso8859_1)
expected_diff = codecs.open(data('mo_charsets_expected_diff'), encoding='utf-8').read()
assert difference.details[0].unified_diff == expected_diff
@skip_unless_tools_exist('msgunfmt')
def test_compare_non_existing(monkeypatch, mo1):
<|code_end|>
, determine the next line of code. You have imports:
import codecs
import pytest
from diffoscope.config import Config
from diffoscope.comparators.missing_file import MissingFile
from diffoscope.comparators.gettext import MoFile
from utils.data import data, load_fixture, get_data
from utils.tools import skip_unless_tools_exist
and context (class names, function names, or code) available:
# Path: diffoscope/config.py
# class Config(object):
# max_diff_block_lines = 256
# max_diff_block_lines_parent = 50
# max_diff_block_lines_saved = float("inf")
# # html-dir output uses ratio * max-diff-block-lines as its limit
# max_diff_block_lines_html_dir_ratio = 4
# # GNU diff cannot process arbitrary large files :(
# max_diff_input_lines = 2 ** 20
# max_report_size = 2000 * 2 ** 10 # 2000 kB
# max_text_report_size = 0
# max_report_child_size = 500 * 2 ** 10
# new_file = False
# fuzzy_threshold = 60
# enforce_constraints = True
# excludes = ()
#
# _singleton = {}
#
# def __init__(self):
# self.__dict__ = self._singleton
#
# def __setattr__(self, k, v):
# super(Config, self).__setattr__(k, v)
#
# if self.enforce_constraints:
# self.check_constraints()
#
# def check_constraints(self):
# if self.max_diff_block_lines < self.max_diff_block_lines_parent: # noqa
# raise ValueError("max_diff_block_lines ({0.max_diff_block_lines}) "
# "cannot be smaller than max_diff_block_lines_parent "
# "({0.max_diff_block_lines_parent})".format(self),
# )
#
# max_ = self.max_diff_block_lines_html_dir_ratio * \
# self.max_diff_block_lines
# if self.max_diff_block_lines_saved < max_: # noqa
# raise ValueError("max_diff_block_lines_saved "
# "({0.max_diff_block_lines_saved}) cannot be smaller than "
# "{0.max_diff_block_lines_html_dir_ratio} * "
# "max_diff_block_lines ({1})".format(self, max_),
# )
. Output only the next line. | monkeypatch.setattr(Config(), 'new_file', True) |
Continue the code snippet: <|code_start|> self.url_base = URL_WIKIPEDIA_INDEX
self.setup_patterns()
def __str__(self):
"""String representation of object."""
return "<WikiPage(title={})>".format(self.title)
@property
def raw(self):
"""Raw wikitext from Wikipedia page."""
if self._raw is None:
self._raw = self.download()
return self._raw
def download(self):
"""Download page from Wikipedia.
Returns
-------
text: str
Content of page
Examples
--------
>>> wp = WikiPage('The Beatles')
>>> text = wp.download()
>>> 'John Lennon' in text
True
"""
<|code_end|>
. Use current file imports:
import re
import requests
from brede.config import config
from docopt import docopt
and context (classes, functions, or code) from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | user_agent = config.get('requests', 'user_agent') |
Continue the code snippet: <|code_start|>URL_BASE_WIKIPEDIA = "https://en.wikipedia.org/w/"
class WikiError(Exception):
"""Exception for misspecified wiki."""
pass
class Wiki(object):
"""Represents an interface to a MediaWiki instance."""
def __init__(self, wiki=None, url=None):
"""Setup URL etc. to MediaWiki API."""
if wiki is None and url is None:
self.wiki = 'Brede'
self.url_base = URL_BASE_BREDE
elif url is None:
if wiki.lower() == 'brede':
self.wiki = 'Brede'
self.url_base = URL_BASE_BREDE
elif wiki.lower() == 'wikipedia':
self.wiki = 'Wikipedia'
self.url_base = URL_BASE_WIKIPEDIA
else:
raise WikiError(("Wrong 'wiki' specified. ",
"should be 'brede' or 'wikipedia'"))
else:
self.url = url
<|code_end|>
. Use current file imports:
import sys
import requests
from brede.config import config
from docopt import docopt
and context (classes, functions, or code) from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | self.user_agent = config.get('requests', 'user_agent') |
Based on the snippet: <|code_start|> directory http://neuro.compute.dtu.dk/services/bredewiki/download/
It is loaded with automagically via the DB class in the db.py module,
os that tables are available from BredeWikiTemplates().tables.
Example
-------
>>> bwt = BredeWikiTemplates()
>>> papers = bwt.tables.brede_paper.all()
>>> 11227136 in set(papers._pmid.dropna().astype(int))
True
>>> # Brain regions from LPBA40 brain atlas
>>> brain_regions = bwt.tables.brede_brain_region.all()
>>> lpba_regions = brain_regions.ix[brain_regions._lpba.notnull(),
... ['_name', '_lpba']]
>>> 'Brain stem' in set(lpba_regions['_name'])
True
"""
def __init__(self, redownload=False):
"""Setup directories and filenames.
Parameters
----------
redownload : bool
Download the database file a new.
"""
<|code_end|>
, predict the immediate next line with the help of imports:
from os import makedirs
from os.path import exists, expanduser, join
from urllib import urlretrieve
from brede.config import config
from docopt import docopt
import db
and context (classes, functions, sometimes code) from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | self.data_dir = expanduser(config.get('data', 'data_dir')) |
Given the following code snippet before the placeholder: <|code_start|>Usage:
questionanalysis.py [options] <question>
Options:
-h --help Help
Examples:
python -m brede.qa.questionanalysis "Where did Uta Frith get her degree?"
"""
from __future__ import absolute_import, division, print_function
class Question(object):
"""Represent a question and its analysis.
Examples
--------
>>> question = Question("Where did Uta Frith get her degree?")
"""
def __init__(self, question):
"""Setup question and API connections."""
self.question = question
<|code_end|>
, predict the next line using imports from the current file:
import json
import docopt
from lazy import lazy
from nltk import ne_chunk, pos_tag, word_tokenize
from ..api.wikidata import Wikidata
and context including class names, function names, and sometimes code from other files:
# Path: brede/api/wikidata.py
# class Wikidata(object):
# """Interface to wikidata.org."""
#
# def __init__(self):
# """Setup credentials for an IBM Watson instance."""
# self.user_agent = config.get('requests', 'user_agent')
# self.language = 'en'
#
# def headers(self):
# """Return dict with header information for request."""
# return {'user-agent': self.user_agent}
#
# def find_entities(self, query, limit=7):
# """Return entities from a Wikidata search.
#
# Parameters
# ----------
# query : str
# String with query
# limit : int, optional
# Maximum number of results to return
#
# Returns
# -------
# entities : generator
# Generator with entities
#
# """
# if limit < 1:
# raise StopIteration
#
# params = {'action': 'wbsearchentities',
# 'language': self.language,
# 'format': 'json',
# 'limit': limit,
# 'search': query}
#
# index, running = 1, True
# while running:
# response = requests.get(
# API_URL, params=params,
# headers=self.headers()).json()
# entities = response['search']
# for entity in entities:
# yield entity
# if index >= limit:
# running = False
# break
# index += 1
# if 'search-continue' in response:
# params['continue'] = response['search-continue']
# else:
# running = False
#
# def find_entity(self, query):
# """Return first entity from a Wikidata search.
#
# Parameters
# ----------
# query : str
# Query string.
#
# Returns
# -------
# entity : dict
# Dictionary with entity information. An empty dict is returned if no
# entity is identified.
#
# """
# try:
# entity = next(self.find_entities(query, limit=1))
# except StopIteration:
# entity = {}
# return entity
#
# def find_entity_id(self, query):
# """Return first id of entity from a Wikidata search.
#
# Parameters
# ----------
# query : str
# Query for item to find.
#
# Returns
# -------
# id_ : str
# String with entity identifier
#
# Examples
# --------
# >>> wikidata = Wikidata()
# >>> id_ = wikidata.find_entity_id('Barack Obama')
# >>> str(id_) # Python 2 returns Unicode
# 'Q76'
#
# """
# entity = self.find_entity(query)
# id_ = entity['id']
# return id_
. Output only the next line. | self.wikidata = Wikidata() |
Next line prediction: <|code_start|>#!/usr/bin/env python
"""Interface to Wikidata.
Usage:
wikidata.py [options] [<query>]
Options:
-h --help Help
--id Return id if relevant
--limit=<n> Limit for the number of results [default: 1]
Example:
$ python -m brede.api.wikidata --id "Helle Thorning"
"""
from __future__ import absolute_import, division, print_function
API_URL = "https://www.wikidata.org/w/api.php"
class Wikidata(object):
"""Interface to wikidata.org."""
def __init__(self):
"""Setup credentials for an IBM Watson instance."""
<|code_end|>
. Use current file imports:
(import json
import requests
import docopt
from ..config import config)
and context including class names, function names, or small code snippets from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | self.user_agent = config.get('requests', 'user_agent') |
Here is a snippet: <|code_start|>
X = SA + m
Sklearn's FastICA implementation is used.
Parameters
----------
n_components : int, optional
Number of ICA components.
Returns
-------
source : Matrix
Estimated source matrix (S)
mixing_matrix : Matrix
Estimated mixing matrix (A)
mean_vector : brede.core.vector.Vector
Estimated mean vector
References
----------
http://scikit-learn.org/stable/modules/decomposition.html#ica
"""
if n_components is None:
n_components = int(np.ceil(np.sqrt(float(min(self.shape)) / 2)))
ica = FastICA(n_components=n_components)
sources = Matrix(ica.fit_transform(self.values), index=self.index)
mixing_matrix = Matrix(ica.mixing_.T, columns=self.columns)
<|code_end|>
. Write the next line using the current file imports:
from matplotlib.pyplot import matshow, pause
from pandas import DataFrame
from scipy.sparse import csr_matrix
from sklearn.decomposition import FastICA
from .vector import Vector
import numpy as np
and context from other files:
# Path: brede/core/vector.py
# class Vector(Series):
# """Extended series object."""
#
# @property
# def _constructor(self):
# return Vector
, which may include functions, classes, or code. Output only the next line. | mean_vector = Vector(ica.mean_, index=self.columns) |
Predict the next line after this snippet: <|code_start|>
def csp_with_two_electrodes():
"""Example for common spatial patterns."""
# Generate data
N = 100
A0 = np.array([[1, 4], [1, 1]])
A1 = np.array([[4, 1], [1, 1]])
y = np.zeros(N)
indices0 = np.arange(N // 2)
indices1 = np.arange(N // 2, N)
y[indices1] = 1
X = npr.randn(N, 2)
X[indices0, :] = X[indices0, :].dot(A0)
X[indices1, :] = X[indices1, :].dot(A1)
plt.figure(figsize=(10, 5))
# Plot data
plt.subplot(1, 2, 1)
plt.plot(X[indices0, 0], X[indices0, 1], 'or')
plt.hold(True)
plt.plot(X[indices1, 0], X[indices1, 1], 'xb')
plt.axis('equal')
plt.legend(['Class 0', 'Class 1'])
plt.xlabel('C3')
plt.ylabel('C4')
plt.title('Simulated EEG data')
# Estimate common spatial patterns
<|code_end|>
using the current file's imports:
import matplotlib.pyplot as plt
import numpy as np
import numpy.random as npr
import docopt
from ..csp import CSP
and any relevant context from other files:
# Path: brede/eeg/csp.py
# class CSP(base.BaseEstimator, base.TransformerMixin):
# """Common spatial patterns.
#
# References
# ----------
# Christian Andreas Kothe, Lecture 7.3 Common Spatial Patterns
# https://www.youtube.com/watch?v=zsOULC16USU
#
# EEGTools
# https://github.com/breuderink/eegtools/
# blob/master/examples/ex_csp_motor_imagery.py
#
# """
#
# def __init__(self, n_components=None):
# """Setup paramters."""
# self.n_components = n_components
#
# @staticmethod
# def class_correlations(X, y):
# """Return list of class correlations."""
# class_correlations = [
# np.corrcoef(X[y == class_indicator, :], rowvar=0)
# for class_indicator in np.unique(y)]
# return class_correlations
#
# @staticmethod
# def class_covariances(X, y):
# """Return list of class covariances."""
# class_covariances = [
# np.cov(X[y == class_indicator, :], rowvar=0)
# for class_indicator in np.unique(y)]
# return class_covariances
#
# def fit(self, X, y):
# """Fit common spatial patterns.
#
# Projection with the generalized eigenvalue problem
#
# eig(class_covariance0, sum(covariances))
#
# The weights are ordered so the eigenvectors associated with
# the largest eigenvalue is first.
#
# Parameters
# ----------
# X : array_like
# Data matrix (time points x channels) for training set
# y : 1D array_like
# Vector with class indicator variables
#
# Returns
# -------
# self : CSP
# The self object
#
# References
# ----------
# Christian Andreas Kothe, Lecture 7.3 Common Spatial Patterns
# https://www.youtube.com/watch?v=zsOULC16USU
#
# EEGTools-like
# https://github.com/breuderink/eegtools/
# blob/master/examples/ex_csp_motor_imagery.py
#
# Common spatial pattern
# https://en.wikipedia.org/wiki/Common_spatial_pattern
#
# """
# # Generalized eigenvalue problem on the class covariances
# class_covariances = self.class_covariances(X, y)
# total_covariance = sum(class_covariances)
# eigenvalues, eigenvectors = eig(class_covariances[0],
# total_covariance)
#
# # Reorder data
# eigenvalues = np.real(eigenvalues)
# indices = np.argsort(-eigenvalues)
# eigenvalues = eigenvalues[indices]
# eigenvectors = eigenvectors[:, indices]
#
# # The model parameters
# if self.n_components is None:
# self.weights_ = eigenvectors
# else:
# self.weights_ = eigenvectors[:, :self.n_components]
#
# return self
#
# def transform(self, X):
# """Project data matrix with CSP.
#
# Parameters
# ----------
# X : array_like
# Data matrix
#
# Returns
# -------
# X_new : array_like
# Projected data matrix (time points x latent variables)
#
# """
# return X.dot(self.weights_)
. Output only the next line. | csp = CSP() |
Predict the next line after this snippet: <|code_start|>
from __future__ import print_function, absolute_import
try:
except ImportError:
try:
except ImportError:
class Pubmed(object):
"""Interface to Pubmed bibliographic information.
PubMed data in the form of MEDLINE records are cache on the local
filesystem as pickle data in the data directory. If the record is not
available locally it is fetch via the Internet on the Entrez server.
"""
last_download_time = datetime.now()
def __init__(self):
"""Setup directories."""
self.logger = logging.getLogger(__name__ + '.Pubmed')
self.logger.addHandler(logging.NullHandler())
<|code_end|>
using the current file's imports:
import errno
import logging
import os
import time
import cPickle as pickle
import pickle
from datetime import datetime
from os.path import expanduser, join
from ConfigParser import NoSectionError
from configparser import NoSectionError
from Bio import Entrez, Medline
from brede.config import config
from docopt import docopt
and any relevant context from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | self.data_dir = expanduser(config.get('data', 'data_dir')) |
Based on the snippet: <|code_start|>"""Test of csp."""
@pytest.fixture
def data():
"""Setup a simulated data set."""
class Data():
N = 100
y = np.zeros(N)
indices0 = np.arange(N // 2)
indices1 = np.arange(N // 2, N)
y[indices1] = 1
A0 = np.array([[1, 4], [1, 1]])
A1 = np.array([[4, 1], [1, 1]])
X = npr.randn(N, 2)
X[indices0, :] = X[indices0, :].dot(A0)
X[indices1, :] = X[indices1, :].dot(A1)
return Data()
def test_csp(data):
"""Test common spatial patterns."""
<|code_end|>
, predict the immediate next line with the help of imports:
import numpy as np
import numpy.random as npr
import pytest
from ..csp import CSP
and context (classes, functions, sometimes code) from other files:
# Path: brede/eeg/csp.py
# class CSP(base.BaseEstimator, base.TransformerMixin):
# """Common spatial patterns.
#
# References
# ----------
# Christian Andreas Kothe, Lecture 7.3 Common Spatial Patterns
# https://www.youtube.com/watch?v=zsOULC16USU
#
# EEGTools
# https://github.com/breuderink/eegtools/
# blob/master/examples/ex_csp_motor_imagery.py
#
# """
#
# def __init__(self, n_components=None):
# """Setup paramters."""
# self.n_components = n_components
#
# @staticmethod
# def class_correlations(X, y):
# """Return list of class correlations."""
# class_correlations = [
# np.corrcoef(X[y == class_indicator, :], rowvar=0)
# for class_indicator in np.unique(y)]
# return class_correlations
#
# @staticmethod
# def class_covariances(X, y):
# """Return list of class covariances."""
# class_covariances = [
# np.cov(X[y == class_indicator, :], rowvar=0)
# for class_indicator in np.unique(y)]
# return class_covariances
#
# def fit(self, X, y):
# """Fit common spatial patterns.
#
# Projection with the generalized eigenvalue problem
#
# eig(class_covariance0, sum(covariances))
#
# The weights are ordered so the eigenvectors associated with
# the largest eigenvalue is first.
#
# Parameters
# ----------
# X : array_like
# Data matrix (time points x channels) for training set
# y : 1D array_like
# Vector with class indicator variables
#
# Returns
# -------
# self : CSP
# The self object
#
# References
# ----------
# Christian Andreas Kothe, Lecture 7.3 Common Spatial Patterns
# https://www.youtube.com/watch?v=zsOULC16USU
#
# EEGTools-like
# https://github.com/breuderink/eegtools/
# blob/master/examples/ex_csp_motor_imagery.py
#
# Common spatial pattern
# https://en.wikipedia.org/wiki/Common_spatial_pattern
#
# """
# # Generalized eigenvalue problem on the class covariances
# class_covariances = self.class_covariances(X, y)
# total_covariance = sum(class_covariances)
# eigenvalues, eigenvectors = eig(class_covariances[0],
# total_covariance)
#
# # Reorder data
# eigenvalues = np.real(eigenvalues)
# indices = np.argsort(-eigenvalues)
# eigenvalues = eigenvalues[indices]
# eigenvectors = eigenvectors[:, indices]
#
# # The model parameters
# if self.n_components is None:
# self.weights_ = eigenvectors
# else:
# self.weights_ = eigenvectors[:, :self.n_components]
#
# return self
#
# def transform(self, X):
# """Project data matrix with CSP.
#
# Parameters
# ----------
# X : array_like
# Data matrix
#
# Returns
# -------
# X_new : array_like
# Projected data matrix (time points x latent variables)
#
# """
# return X.dot(self.weights_)
. Output only the next line. | csp = CSP() |
Given the code snippet: <|code_start|> evidence['title'],
text))
class Watson(object):
"""Interface to IBM Watson.
The use of this class requires credentials to an IBM Watson instance.
User, password and API URL are read from the brede.config if it is not
specified.
Example
-------
>>> try:
... api = Watson()
... except WatsonMissingConfig:
... # Watson not available, so we don't test for queries.
... print(True)
... else:
... answer = api.ask('Who was called John?')
... 'question' in answer
True
"""
def __init__(self, user=None, password=None, url=None):
"""Setup credentials for an IBM Watson instance."""
if user is None and password is None and url is None:
self.check_config()
<|code_end|>
, generate the next line using the imports in this file:
import json
import requests
import yaml
from itertools import islice
from brede.config import config
from docopt import docopt
and context (functions, classes, or occasionally code) from other files:
# Path: brede/config.py
# CONFIG_FILENAMES = [
# 'brede.cfg',
# '~/etc/brede.cfg',
# '~/brede.cfg']
# DEFAULTS = """
# [requests]
# user_agent = brede
#
# [data]
# data_dir = ~/brede_data
# """
. Output only the next line. | self.user = user or config.get('watson', 'user') |
Based on the snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Transducers for streams.
A transducer maintains internal state which is updated every time
on_next is called. It implements a function
f: Input X State -> Output X State
For those who speak automata, this is a Mealy machine.
"""
class Transducer:
def __init__(self):
pass
def step(self, v):
return v # return the transformed value
def complete(self):
"""Can optionally return a final value
"""
pass
<|code_end|>
, predict the immediate next line with the help of imports:
from collections import deque
from statistics import median
from antevents.base import Publisher, Filter, SensorEvent, filtermethod
and context (classes, functions, sometimes code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | @filtermethod(Publisher) |
Predict the next line after this snippet: <|code_start|>@filtermethod(Publisher)
def transduce(this, xform):
"""Execute a (stateful) transducer to transform the event sequence.
The transducer provides a step() method to accept a value
and return the transformation. If the step() returns None,
no output event is emitted
Keyword arguments:
:param Transducer transducer: A transducer to execute.
:returns: An Publisher sequence containing the results from the
transducer.
:rtype: Publisher
"""
def on_next(self, x):
# The base Filter class will handle any exceptions thrown by the
# step() call. Don't call _dispatch_error here(), as it will result
# in it being called twice. TODO: This is somewhat error prone - need to
# think through tis a bit more.
x_prime = xform.step(x)
if x_prime is not None:
self._dispatch_next(x_prime)
def on_completed(self):
x_prime = xform.complete()
# there may be a final event to be sent out upon completion
if x_prime is not None:
self._dispatch_next(x_prime)
self._dispatch_completed()
<|code_end|>
using the current file's imports:
from collections import deque
from statistics import median
from antevents.base import Publisher, Filter, SensorEvent, filtermethod
and any relevant context from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | return Filter(this, on_next=on_next, on_completed=on_completed, |
Continue the code snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Transducers for streams.
A transducer maintains internal state which is updated every time
on_next is called. It implements a function
f: Input X State -> Output X State
For those who speak automata, this is a Mealy machine.
"""
class Transducer:
def __init__(self):
pass
def step(self, v):
return v # return the transformed value
def complete(self):
"""Can optionally return a final value
"""
pass
<|code_end|>
. Use current file imports:
from collections import deque
from statistics import median
from antevents.base import Publisher, Filter, SensorEvent, filtermethod
and context (classes, functions, or code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | @filtermethod(Publisher) |
Given snippet: <|code_start|>
def on_t_low_next(self, event):
if self.state==TOO_HOT or self.state==INITIAL:
self._dispatch_next(self._make_event("OFF"))
self.state = NORMAL
def on_t_low_completed(self):
if not self.completed:
self._dispatch_completed()
self.completed = True
def on_t_low_error(self, e):
pass
def on_between_next(self, x):
if self.state==INITIAL:
self.state = NORMAL
self._dispatch_next(self._make_event("OFF"))
else:
pass # stay in current state
def on_between_error(self, e):
pass
def on_between_completed(self):
pass # don't want to pass this forward, as it will happen after the first item
def __repr__(self):
return 'Controller'
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import asyncio
import time
import random
import antevents.linq.transducer
import antevents.linq.where
import antevents.linq.first
import antevents.linq.dispatch
from antevents.base import Publisher, Scheduler, SensorEvent, SensorPub,\
DefaultSubscriber, FatalError
and context:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
which might include code, classes, or functions. Output only the next line. | class BypassValveActuator(DefaultSubscriber): |
Given the code snippet: <|code_start|> if not self.completed:
self._dispatch_completed()
self.completed = True
def on_t_low_error(self, e):
pass
def on_between_next(self, x):
if self.state==INITIAL:
self.state = NORMAL
self._dispatch_next(self._make_event("OFF"))
else:
pass # stay in current state
def on_between_error(self, e):
pass
def on_between_completed(self):
pass # don't want to pass this forward, as it will happen after the first item
def __repr__(self):
return 'Controller'
class BypassValveActuator(DefaultSubscriber):
def on_next(self, x):
if x.val=='ON':
print("Turning ON!")
elif x.val=='OFF':
print("Turning OFF!")
else:
<|code_end|>
, generate the next line using the imports in this file:
import asyncio
import time
import random
import antevents.linq.transducer
import antevents.linq.where
import antevents.linq.first
import antevents.linq.dispatch
from antevents.base import Publisher, Scheduler, SensorEvent, SensorPub,\
DefaultSubscriber, FatalError
and context (functions, classes, or occasionally code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | raise FatalError("Unexpected event value for actuator: %s" % x.val) |
Based on the snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
Output on raspberry pi gpio pins
"""
class GpioPinOut(DefaultSubscriber):
"""Actuator for an output pin on the GPIO bus.
"""
def __init__(self, port=11):
self.port = port
gpio.setmode(gpio.BOARD)
gpio.setup(port, gpio.OUT, initial=gpio.LOW)
self.current_state = False
self.closed = False
def on_next(self, x):
"""If x is a truthy value, we turn the light on
"""
<|code_end|>
, predict the immediate next line with the help of imports:
import RPi.GPIO as gpio
from antevents.base import DefaultSubscriber, SensorEvent
and context (classes, functions, sometimes code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | assert not isinstance(x, SensorEvent), "Send a raw value, not a sensor event" |
Predict the next line for this snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Sensors for AntEvents
Uses the nanpy library (https://github.com/nanpy/nanpy), which controls
a slave Arduino processor. The sensors are connected to the Arduino.
Both digital (1/0 output) and analogue (0-1023 ouput) sensors may be
be connected to the Arduino. To use this, Nanpy firmware needs to be
flashed onto the Arduino to allow Python to be used.
Note -This sensor class can only be used with sensors which send their output
straight to the Arduino pins. For sensors which use I2C or SPI, with their
own registers, a library to use them has to be written separately.
"""
ardApi = ArduinoApi(connection=SerialManager(device = '/dev/ttyACM0'))
<|code_end|>
with the help of current file imports:
from antevents.base import Publisher, IndirectPublisherMixin
from nanpy import ArduinoApi,SerialManager
and context from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
, which may contain function names, class names, or code. Output only the next line. | class ArduinoSensor(Publisher, IndirectPublisherMixin): |
Given the following code snippet before the placeholder: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Sensors for AntEvents
Uses the nanpy library (https://github.com/nanpy/nanpy), which controls
a slave Arduino processor. The sensors are connected to the Arduino.
Both digital (1/0 output) and analogue (0-1023 ouput) sensors may be
be connected to the Arduino. To use this, Nanpy firmware needs to be
flashed onto the Arduino to allow Python to be used.
Note -This sensor class can only be used with sensors which send their output
straight to the Arduino pins. For sensors which use I2C or SPI, with their
own registers, a library to use them has to be written separately.
"""
ardApi = ArduinoApi(connection=SerialManager(device = '/dev/ttyACM0'))
<|code_end|>
, predict the next line using imports from the current file:
from antevents.base import Publisher, IndirectPublisherMixin
from nanpy import ArduinoApi,SerialManager
and context including class names, function names, and sometimes code from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | class ArduinoSensor(Publisher, IndirectPublisherMixin): |
Predict the next line for this snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Raspberry Pi GPIO Sensor for AntEvents.
Allows digital (1/0 output) sensors to be connected straight to the
Raspberry Pi (ADC needed for the Pi to take analogue output).
This sensor class can only be used with sensors which send their output straight
to the Raspberry Pi GPIO pins. For sensors which use I2C or SPI, with their
own registers, a library to use them has to be written separately.
"""
<|code_end|>
with the help of current file imports:
from antevents.base import Publisher, IndirectPublisherMixin
import RPi.GPIO as GPIO
and context from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
, which may contain function names, class names, or code. Output only the next line. | class RPISensor(Publisher, IndirectPublisherMixin): |
Using the snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Raspberry Pi GPIO Sensor for AntEvents.
Allows digital (1/0 output) sensors to be connected straight to the
Raspberry Pi (ADC needed for the Pi to take analogue output).
This sensor class can only be used with sensors which send their output straight
to the Raspberry Pi GPIO pins. For sensors which use I2C or SPI, with their
own registers, a library to use them has to be written separately.
"""
<|code_end|>
, determine the next line of code. You have imports:
from antevents.base import Publisher, IndirectPublisherMixin
import RPi.GPIO as GPIO
and context (class names, function names, or code) available:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | class RPISensor(Publisher, IndirectPublisherMixin): |
Given snippet: <|code_start|>"""
This is the example from publishers.rst. It reads a CSV-formatted spreadsheet
file and generates an event from each line. We call publishers that pull
data from an external source "readers".
There is a more flexible csv reader class defined in
antevents.adapters.csv.
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import csv
import sys
import asyncio
import antevents.linq.output # load the output method on the publisher
from antevents.base import Publisher, DirectPublisherMixin, Scheduler,\
SensorEvent, FatalError
and context:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
which might include code, classes, or functions. Output only the next line. | class SimpleCsvReader(Publisher, DirectPublisherMixin): |
Using the snippet: <|code_start|>"""
This is the example from publishers.rst. It reads a CSV-formatted spreadsheet
file and generates an event from each line. We call publishers that pull
data from an external source "readers".
There is a more flexible csv reader class defined in
antevents.adapters.csv.
"""
<|code_end|>
, determine the next line of code. You have imports:
import csv
import sys
import asyncio
import antevents.linq.output # load the output method on the publisher
from antevents.base import Publisher, DirectPublisherMixin, Scheduler,\
SensorEvent, FatalError
and context (class names, function names, or code) available:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | class SimpleCsvReader(Publisher, DirectPublisherMixin): |
Next line prediction: <|code_start|> def _observe(self):
try:
row = self.reader.__next__()
event = SensorEvent(ts=float(row[0]), sensor_id=row[1],
val=float(row[2]))
self._dispatch_next(event)
except StopIteration:
self.file.close()
self._dispatch_completed()
except FatalError:
self._close()
raise
except Exception as e:
self.file.close()
self._dispatch_error(e)
# If we are running this as a script, read events from the specified
# file and print them via output().
if __name__ == '__main__':
# check command line arguments
if len(sys.argv)!=2:
# did not provide filename or provided too many arguments
sys.stderr.write("%s FILENAME\n" % sys.argv[0])
if len(sys.argv)==1:
sys.stderr.write(" FILENAME is a required parameter\n")
sys.exit(1)
reader = SimpleCsvReader(sys.argv[1])
reader.output()
<|code_end|>
. Use current file imports:
(import csv
import sys
import asyncio
import antevents.linq.output # load the output method on the publisher
from antevents.base import Publisher, DirectPublisherMixin, Scheduler,\
SensorEvent, FatalError)
and context including class names, function names, or small code snippets from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | scheduler = Scheduler(asyncio.get_event_loop()) |
Next line prediction: <|code_start|>data from an external source "readers".
There is a more flexible csv reader class defined in
antevents.adapters.csv.
"""
class SimpleCsvReader(Publisher, DirectPublisherMixin):
"""A simple csv file reader. We assume that each row contains
a timestamp, a sensor id, and a value.
We could save some work here by subclassing from
antevents.generic.DirectReader.
"""
def __init__(self, filename, has_header_row=True):
super().__init__() # Make sure the publisher class is initialized
self.filename = filename
self.file = open(filename, 'r', newline='')
self.reader = csv.reader(self.file)
if has_header_row:
# swallow up the header row so it is not passed as data
try:
self.reader.__next__()
except Exception as e:
raise FatalError("Problem reading header row of csv file %s: %s" %
(filename, e))
def _observe(self):
try:
row = self.reader.__next__()
<|code_end|>
. Use current file imports:
(import csv
import sys
import asyncio
import antevents.linq.output # load the output method on the publisher
from antevents.base import Publisher, DirectPublisherMixin, Scheduler,\
SensorEvent, FatalError
and context including class names, function names, or small code snippets from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | event = SensorEvent(ts=float(row[0]), sensor_id=row[1], |
Given the following code snippet before the placeholder: <|code_start|>"""
This is the example from publishers.rst. It reads a CSV-formatted spreadsheet
file and generates an event from each line. We call publishers that pull
data from an external source "readers".
There is a more flexible csv reader class defined in
antevents.adapters.csv.
"""
class SimpleCsvReader(Publisher, DirectPublisherMixin):
"""A simple csv file reader. We assume that each row contains
a timestamp, a sensor id, and a value.
We could save some work here by subclassing from
antevents.generic.DirectReader.
"""
def __init__(self, filename, has_header_row=True):
super().__init__() # Make sure the publisher class is initialized
self.filename = filename
self.file = open(filename, 'r', newline='')
self.reader = csv.reader(self.file)
if has_header_row:
# swallow up the header row so it is not passed as data
try:
self.reader.__next__()
except Exception as e:
<|code_end|>
, predict the next line using imports from the current file:
import csv
import sys
import asyncio
import antevents.linq.output # load the output method on the publisher
from antevents.base import Publisher, DirectPublisherMixin, Scheduler,\
SensorEvent, FatalError
and context including class names, function names, and sometimes code from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | raise FatalError("Problem reading header row of csv file %s: %s" % |
Given the code snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Sensors for AntEvents
Updated to suit the API changes Jeff mentioned, so that the following can be used as follows:
sensor = SensorPub(RPISensor())
The following classes allow digital/analogue sensors (which are not connected using I2C) to be connected to a Raspberry Pi/Arduino and used with AntEvents
"""
<|code_end|>
, generate the next line using the imports in this file:
import time
import RPi.GPIO as GPIO
from antevents.base import Publisher, IndirectPublisherMixin
from antevents.sensor import SensorEvent
from nanpy import ArduinoApi,SerialManager
and context (functions, classes, or occasionally code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | class RPISensor(Publisher, IndirectPublisherMixin): |
Given snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""Sensors for AntEvents
Updated to suit the API changes Jeff mentioned, so that the following can be used as follows:
sensor = SensorPub(RPISensor())
The following classes allow digital/analogue sensors (which are not connected using I2C) to be connected to a Raspberry Pi/Arduino and used with AntEvents
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import time
import RPi.GPIO as GPIO
from antevents.base import Publisher, IndirectPublisherMixin
from antevents.sensor import SensorEvent
from nanpy import ArduinoApi,SerialManager
and context:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
which might include code, classes, or functions. Output only the next line. | class RPISensor(Publisher, IndirectPublisherMixin): |
Using the snippet: <|code_start|> p.yaxis.axis_label = plot_specs.y_axis_label
p.x_range.follow = "end"
p.x_range.follow_interval = 10
p.x_range.range_padding = 0
# p.xaxis.formatter=DatetimeTickFormatter(dict(seconds=["%S"],minutes=["%M"],hours=["%d %B %Y"],days=["%d %B %Y"],months=["%d %B %Y"],years=["%d %B %Y"]))
p.xaxis.major_label_orientation = pi/4
p.line(x=plot_specs.x_axis_label, y=plot_specs.y_axis_label, color="blue", source=plot_specs.source)
p.circle(x=plot_specs.x_axis_label, y=plot_specs.y_axis_label, color="red", source=plot_specs.source)
curdoc().add_periodic_callback(functools.partial(self.update, name=plot_specs.name), plot_specs.update_period) #period in ms
return p
def run(self):
print("In thread.run")
self.figs = [self.make_fig(self.plotters[name]) for name in self.plotters]
self.session = push_session(curdoc())
self.session.show(column(self.figs))
curdoc().title = 'AntEvent Streams'
self.session.loop_until_closed()
class BokehPlot(object):
def __init__(self, name, y_axis_label="", x_axis_label="timestamp", update_period_in_ms=500):
self.name = name
self.x_axis_label = x_axis_label
self.y_axis_label = y_axis_label
self.update_period = update_period_in_ms
self.source = ColumnDataSource(dict({ self.x_axis_label: [], self.y_axis_label: []} ))
<|code_end|>
, determine the next line of code. You have imports:
import datetime
import logging
import functools
import threading, queue
import numpy as np
from math import pi
from bokeh.charts import TimeSeries, show, output_file, output_server
from bokeh.plotting import figure, curdoc
from bokeh.layouts import column # to show two or more plots arranged in a column
from bokeh.models import ColumnDataSource
from bokeh.models import DatetimeTickFormatter
from bokeh.client import push_session
from antevents.base import Filter, filtermethod
and context (class names, function names, or code) available:
# Path: antevents/base.py
# class Filter(Publisher, DefaultSubscriber):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# super().__init__()
# self._on_next = on_next
# self._on_completed = on_completed
# self._on_error = on_error
# self.name = name
# self.dispose = previous_in_chain.subscribe(self) # XXX how to use this?
#
# def on_next(self, x):
# if self._on_next:
# try:
# self._on_next(self, x)
# except FatalError:
# raise
# except Exception as e:
# logger.exception("Got an exception on %s.on_next(%s)" %
# (self, x))
# self.on_error(e)
# self.dispose() # stop from getting upstream events
# else:
# self._dispatch_next(x)
#
# def on_error(self, e):
# if self._on_error:
# self._on_error(self, e)
# else:
# self._dispatch_error(e)
#
# def on_completed(self):
# if self._on_completed:
# self._on_completed(self)
# else:
# self._dispatch_completed()
#
# def __str__(self):
# if hasattr(self, 'name') and self.name:
# return self.name
# else:
# return super().__str__()
#
# def filtermethod(base, alias=None):
# """Function decorator that creates a linq-style filter out of the
# specified function. As described in the antevents.linq documentation,
# it should take a Publisher as its first argument (the source of events)
# and return a Publisher (representing the end the filter sequence once
# the filter is included. The returned Publisher is typically an instance
# of antevents.base.Filter.
#
# The specified function is used in two places:
# 1. A method with the specified name is added to the specified class
# (usually the Publisher base class). This is for the fluent (method
# chaining) API.
# 2. A function is created in the local namespace for use in the functional API.
# This function does not take the publisher as an argument. Instead,
# it takes the remaining arguments and then returns a function which,
# when passed a publisher, subscribes to it and returns a filter.
#
# Decorator arguments:
# :param T base: Base class to extend with method
# (usually antevents.base.Publisher)
# :param string alias: an alias for this function or list of aliases
# (e.g. map for select, etc.).
#
# :returns: A function that takes the class to be decorated.
# :rtype: func -> func
#
# This was adapted from the RxPy extensionmethod decorator.
# """
# def inner(func):
# """This function is returned by the outer filtermethod()
#
# :param types.FunctionType func: Function to be decorated
# """
#
# func_names = [func.__name__,]
# if alias:
# aliases = alias if isinstance(alias, list) else [alias]
# func_names += aliases
#
# _thunk = _ThunkBuilder(func)
#
# # For the primary name and all aliases, set the name on the
# # base class as well as in the local namespace.
# for func_name in func_names:
# setattr(base, func_name, func)
# func.__globals__[func_name] = _thunk
# return _thunk
# return inner
. Output only the next line. | class BokehPlotManager(Filter): |
Continue the code snippet: <|code_start|>
class ValueListSensor:
def __init__(self, sensor_id, values):
self.sensor_id = sensor_id
def generator():
for v in values:
yield v
self.generator = generator()
def sample(self):
return self.generator.__next__()
def __repr__(self):
return 'ValueListSensor(%s)' % self.sensor_id
def make_test_publisher(sensor_id, mean=100.0, stddev=20.0, stop_after_events=None):
"""Here is an exmple test publisher that generates a random value"""
if stop_after_events is not None:
def generator():
for i in range(stop_after_events):
yield SensorEvent(sensor_id, time.time(),
random.gauss(mean, stddev))
else: # go on forever
def generator():
while True:
yield SensorEvent(sensor_id, time.time(),
random.gauss(mean, stddev))
g = generator()
<|code_end|>
. Use current file imports:
import time
import unittest
import random
from antevents.base import IterableAsPublisher, DefaultSubscriber, FatalError,\
SensorEvent
and context (classes, functions, or code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | o = IterableAsPublisher(g, name='Sensor(%s)' % sensor_id) |
Given the following code snippet before the placeholder: <|code_start|>
def make_test_publisher(sensor_id, mean=100.0, stddev=20.0, stop_after_events=None):
"""Here is an exmple test publisher that generates a random value"""
if stop_after_events is not None:
def generator():
for i in range(stop_after_events):
yield SensorEvent(sensor_id, time.time(),
random.gauss(mean, stddev))
else: # go on forever
def generator():
while True:
yield SensorEvent(sensor_id, time.time(),
random.gauss(mean, stddev))
g = generator()
o = IterableAsPublisher(g, name='Sensor(%s)' % sensor_id)
return o
def make_test_publisher_from_vallist(sensor_id, values):
"""Create a publisher that generates the list of values when sampled, but uses
real timestamps.
"""
def generator():
for val in values:
yield SensorEvent(sensor_id, time.time(), val)
o = IterableAsPublisher(generator(), name='Sensor(%s)' % sensor_id)
return o
<|code_end|>
, predict the next line using imports from the current file:
import time
import unittest
import random
from antevents.base import IterableAsPublisher, DefaultSubscriber, FatalError,\
SensorEvent
and context including class names, function names, and sometimes code from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | class ValidationSubscriber(DefaultSubscriber): |
Given the code snippet: <|code_start|> """A version of ValidationSubscriber that calls a stop
function after the specified events have been received.
"""
def __init__(self, expected_stream, test_case, stop_fn,
extract_value_fn=lambda event:event.val):
super().__init__(expected_stream, test_case,
extract_value_fn=extract_value_fn)
self.stop_fn = stop_fn
def on_next(self, x):
super().on_next(x)
if self.next_idx==len(self.expected_stream):
print("ValidateAndStopSubscriber: stopping")
self.stop_fn()
class CaptureSubscriber(DefaultSubscriber):
"""Capture the sequence of events in a list for later use.
"""
def __init__(self):
self.events = []
self.completed = False
def on_next(self, x):
self.events.append(x)
def on_completed(self):
self.completed = True
def on_error(self, e):
<|code_end|>
, generate the next line using the imports in this file:
import time
import unittest
import random
from antevents.base import IterableAsPublisher, DefaultSubscriber, FatalError,\
SensorEvent
and context (functions, classes, or occasionally code) from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | raise FatalError("Should not get on_error, got on_error(%s)" % e) |
Next line prediction: <|code_start|>
def __repr__(self):
if self.stop_after_events is None:
return 'RandomSensor(%s, mean=%s, stddev=%s)' % \
(self.sensor_id, self.mean, self.stddev)
else:
return 'RandomSensor(%s, mean=%s, stddev=%s, stop_after_events=%s)' % \
(self.sensor_id, self.mean, self.stddev, self.stop_after_events)
class ValueListSensor:
def __init__(self, sensor_id, values):
self.sensor_id = sensor_id
def generator():
for v in values:
yield v
self.generator = generator()
def sample(self):
return self.generator.__next__()
def __repr__(self):
return 'ValueListSensor(%s)' % self.sensor_id
def make_test_publisher(sensor_id, mean=100.0, stddev=20.0, stop_after_events=None):
"""Here is an exmple test publisher that generates a random value"""
if stop_after_events is not None:
def generator():
for i in range(stop_after_events):
<|code_end|>
. Use current file imports:
(import time
import unittest
import random
from antevents.base import IterableAsPublisher, DefaultSubscriber, FatalError,\
SensorEvent)
and context including class names, function names, or small code snippets from other files:
# Path: antevents/base.py
# class DefaultSubscriber:
# class CallableAsSubscriber:
# class FatalError(Exception):
# class InvalidTopicError(FatalError):
# class UnknownTopicError(FatalError):
# class TopicAlreadyClosed(FatalError):
# class ExcInDispatch(FatalError):
# class Publisher:
# class Filter(Publisher, DefaultSubscriber):
# class _ThunkBuilder:
# class DirectPublisherMixin:
# class EventLoopPublisherMixin:
# class IterableAsPublisher(Publisher, DirectPublisherMixin):
# class FunctionIteratorAsPublisher(Publisher, DirectPublisherMixin):
# class SensorPub(Publisher, DirectPublisherMixin):
# class BlockingSubscriber:
# class _ThreadForBlockingSubscriber(threading.Thread):
# class _ThreadForBlockingPublisher(threading.Thread):
# class ScheduleError(FatalError):
# class Scheduler:
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def _on_next_name(topic):
# def _on_error_name(topic):
# def _on_completed_name(topic):
# def __init__(self, on_next=None, on_error=None, on_completed=None,
# topic=None):
# def default_error(err):
# def __str__(self):
# def __init__(self, topics=None):
# def subscribe(self, subscriber, topic_mapping=None):
# def dispose():
# def _schedule(self, enqueue_fn):
# def _close_topic(self, topic):
# def _dispatch_next(self, x, topic=None):
# def _dispatch_completed(self, topic=None):
# def _dispatch_error(self, e, topic=None):
# def print_downstream(self):
# def has_subscribers(step):
# def print_from(current_seq, step):
# def pp_subscribers(self):
# def __init__(self, previous_in_chain,
# on_next=None, on_completed=None,
# on_error=None, name=None):
# def on_next(self, x):
# def on_error(self, e):
# def on_completed(self):
# def __str__(self):
# def _is_thunk(t):
# def _make_thunk(t):
# def __init__(self, func):
# def __call__(self, *args, **kwargs):
# def apply(this):
# def __repr__(self):
# def _subscribe_thunk(prev, thunk):
# def filtermethod(base, alias=None):
# def inner(func):
# def _observe(self):
# def _observe_event_loop(self):
# def _stop_loop(self):
# def __init__(self, iterable, name=None):
# def _observe(self):
# def _close(self):
# def __str__(self):
# def from_iterable(i):
# def from_list(l):
# def __init__(self, initial_state, condition, iterate, result_selector):
# def _observe(self):
# def from_func(init, cond, iter, selector):
# def __str__(self):
# def make_sensor_event(sensor, sample):
# def __init__(self, sensor, make_event_fn=make_sensor_event):
# def _observe(self):
# def __repr__(self):
# def __init__(self, scheduler, topics=None):
# def start():
# def request_stop(self):
# def _wait_and_dispatch(self):
# def _on_next(self, topic, x):
# def _on_completed(self, topic):
# def _on_error(self, topic, e):
# def _close(self):
# def __init__(self, subscriber, scheduler):
# def run(self):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, publisher, interval, scheduler):
# def _stop_loop(self):
# def run(self):
# def enqueue_fn(fn, *args):
# def die(): # need to stop the scheduler in the main loop
# def done():
# def __init__(self, event_loop):
# def exception_handler(loop, context):
# def _remove_from_active_schedules(self, publisher):
# def schedule_periodic(self, publisher, interval):
# def cancel():
# def run():
# def schedule_sensor(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event,
# print_downstream=False):
# def schedule_recurring(self, publisher):
# def cancel():
# def run():
# def schedule_on_main_event_loop(self, publisher):
# def stop():
# def schedule_on_private_event_loop(self, publisher):
# def enqueue_fn(fn, *args):
# def thread_main():
# def die(): # need to stop the scheduler in the main loop
# def loop_done():
# def schedule_periodic_on_separate_thread(self, publisher, interval):
# def schedule_sensor_on_separate_thread(self, sensor, interval, *subscriber_sequence,
# make_event_fn=make_sensor_event):
# def schedule_later_one_time(self, publisher, interval):
# def cancel():
# def run():
# def run_forever(self):
# def _schedule_coroutine(self, coro, done_callback):
# def cb(f):
# def stop(self):
# def recheck_stop(f):
. Output only the next line. | yield SensorEvent(sensor_id, time.time(), |
Using the snippet: <|code_start|># Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
"""
Pandas (http://pandas.pydata.org) is a data analysis library.
This module contains adapters for converting between antevents
event streams and Pandas data types.
"""
<|code_end|>
, determine the next line of code. You have imports:
import datetime
import pandas as pd
from antevents.base import DefaultSubscriber
and context (class names, function names, or code) available:
# Path: antevents/base.py
# class DefaultSubscriber:
# """This is the interface to be implemented by a subscriber
# which consumes the events from an publisher when subscribing
# on the default topic.
# """
# def on_next(self, x):
# pass
#
# def on_error(self, e):
# pass
#
# def on_completed(self):
# pass
. Output only the next line. | class PandasSeriesWriter(DefaultSubscriber): |
Continue the code snippet: <|code_start|>from __future__ import print_function
N = 10000
def main(data):
<|code_end|>
. Use current file imports:
import cProfile
import StringIO
import pstats
from hippiehug import DocChain
from os import urandom
and context (classes, functions, or code) from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
. Output only the next line. | c = DocChain() |
Given snippet: <|code_start|> q.task_done()
class S3Chain():
def __init__(self, chain_name):
""" Initialize the S3 chain with an S3 bucket name. """
self.name = chain_name
self.cache = {}
# Make a connection to AWS S3
self.s3 = boto3.resource('s3')
try:
self.s3.create_bucket(Bucket=self.name, ACL='public-read',
CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})
except:
pass
finally:
self.bucket = self.s3.Bucket(self.name)
# Get the old root if possible
new_root = None
try:
o = self.s3.Object(self.name, "/root")
if o.metadata["type"] == "Root":
new_root = o.get()["Body"].read()
except Exception as e:
pass
# Initialize the chain
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .Chain import DocChain, Document, Block
from .Utils import ascii_hash
from binascii import hexlify
from os import urandom
from json import dumps, loads
from Queue import Queue as Queue
from queue import Queue as Queue
from threading import Thread
import boto3
and context:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# Path: hippiehug-package/hippiehug/Utils.py
# def ascii_hash(item):
# """
# >>> ascii_hash(b'value')[:4] == b'cd42'
# True
# """
# return hexlify(binary_hash(item))
which might include code, classes, or functions. Output only the next line. | self.chain = DocChain(store=self, root_hash=new_root) |
Here is a snippet: <|code_start|># We implement a chain that lives on Amazon S3
# For tests it necessary to have a configured AWS account.
try:
except:
print("Cannot install Boto3")
try:
except:
def worker(q, bucket):
while True:
(key, value) = q.get()
try:
<|code_end|>
. Write the next line using the current file imports:
from .Chain import DocChain, Document, Block
from .Utils import ascii_hash
from binascii import hexlify
from os import urandom
from json import dumps, loads
from Queue import Queue as Queue
from queue import Queue as Queue
from threading import Thread
import boto3
and context from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# Path: hippiehug-package/hippiehug/Utils.py
# def ascii_hash(item):
# """
# >>> ascii_hash(b'value')[:4] == b'cd42'
# True
# """
# return hexlify(binary_hash(item))
, which may include functions, classes, or code. Output only the next line. | if isinstance(value, Document): |
Continue the code snippet: <|code_start|># We implement a chain that lives on Amazon S3
# For tests it necessary to have a configured AWS account.
try:
except:
print("Cannot install Boto3")
try:
except:
def worker(q, bucket):
while True:
(key, value) = q.get()
try:
if isinstance(value, Document):
bucket.put_object(Key="/Objects/%s" % key, ContentType="text/plain",
Body=value.item, Metadata={"type":"Document"})
<|code_end|>
. Use current file imports:
from .Chain import DocChain, Document, Block
from .Utils import ascii_hash
from binascii import hexlify
from os import urandom
from json import dumps, loads
from Queue import Queue as Queue
from queue import Queue as Queue
from threading import Thread
import boto3
and context (classes, functions, or code) from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# Path: hippiehug-package/hippiehug/Utils.py
# def ascii_hash(item):
# """
# >>> ascii_hash(b'value')[:4] == b'cd42'
# True
# """
# return hexlify(binary_hash(item))
. Output only the next line. | if isinstance(value, Block): |
Here is a snippet: <|code_start|> self.cache[key] = value
self.q.put((key, value))
def add(self, items):
""" Add a new block with the given items. """
self.chain.multi_add(items)
self.q.join()
# Only commit the new head after everything else.
new_root = self.chain.root()
self.bucket.put_object(Key="/root", ContentType="text/plain",
Body=new_root, Metadata={"type":"Root"})
def get(self, bid, sid, evidence = None):
""" Get the item at the block bid, position sid. Optionally, gather
evidence for the proof."""
return self.chain.get(bid, sid, evidence)
def __del__(self):
pass # self.q.join()
## ====================================================
## -------------------- TESTS -------------------------
def xtest_create_bucket():
<|code_end|>
. Write the next line using the current file imports:
from .Chain import DocChain, Document, Block
from .Utils import ascii_hash
from binascii import hexlify
from os import urandom
from json import dumps, loads
from Queue import Queue as Queue
from queue import Queue as Queue
from threading import Thread
import boto3
and context from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# Path: hippiehug-package/hippiehug/Utils.py
# def ascii_hash(item):
# """
# >>> ascii_hash(b'value')[:4] == b'cd42'
# True
# """
# return hexlify(binary_hash(item))
, which may include functions, classes, or code. Output only the next line. | test1name = ascii_hash(urandom(16)) |
Here is a snippet: <|code_start|>
# from json import dumps, loads
class RedisChain():
def __init__(self, chain_name, host='localhost', port=6379, db=0):
""" Initialize the Redis chain with an redis database. """
self.r = redis.StrictRedis(host, port, db)
self.name = chain_name
self.cache = {}
# Recover the latest head, if there is one
new_head = self.r.get('%s.head' % self.name)
# Initialize the chain
<|code_end|>
. Write the next line using the current file imports:
import future
import redis
import pytest
from .Chain import DocChain, Document, Block
from msgpack import packb, unpackb
from queue import Queue
from threading import Thread
and context from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
, which may include functions, classes, or code. Output only the next line. | self.chain = DocChain(store=self, root_hash=new_head) |
Given the following code snippet before the placeholder: <|code_start|>class RedisChain():
def __init__(self, chain_name, host='localhost', port=6379, db=0):
""" Initialize the Redis chain with an redis database. """
self.r = redis.StrictRedis(host, port, db)
self.name = chain_name
self.cache = {}
# Recover the latest head, if there is one
new_head = self.r.get('%s.head' % self.name)
# Initialize the chain
self.chain = DocChain(store=self, root_hash=new_head)
def root(self):
""" Returns the root of the chain. """
return self.chain.root()
def __getitem__(self, key):
if key in self.cache:
return self.cache[key]
if len(self.cache) > 10000:
self.cache = {}
o = unpackb(self.r.get(key))
if o[b"type"] == b"Document":
<|code_end|>
, predict the next line using imports from the current file:
import future
import redis
import pytest
from .Chain import DocChain, Document, Block
from msgpack import packb, unpackb
from queue import Queue
from threading import Thread
and context including class names, function names, and sometimes code from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
. Output only the next line. | obj = Document(o[b"body"]) |
Next line prediction: <|code_start|> self.r = redis.StrictRedis(host, port, db)
self.name = chain_name
self.cache = {}
# Recover the latest head, if there is one
new_head = self.r.get('%s.head' % self.name)
# Initialize the chain
self.chain = DocChain(store=self, root_hash=new_head)
def root(self):
""" Returns the root of the chain. """
return self.chain.root()
def __getitem__(self, key):
if key in self.cache:
return self.cache[key]
if len(self.cache) > 10000:
self.cache = {}
o = unpackb(self.r.get(key))
if o[b"type"] == b"Document":
obj = Document(o[b"body"])
if o[b"type"] == b"Block":
<|code_end|>
. Use current file imports:
(import future
import redis
import pytest
from .Chain import DocChain, Document, Block
from msgpack import packb, unpackb
from queue import Queue
from threading import Thread)
and context including class names, function names, or small code snippets from other files:
# Path: hippiehug-package/hippiehug/Chain.py
# class DocChain(Chain):
# """A chain that stores hashes of documents. Construct like a *Chain*."""
#
# def multi_add(self, items):
# """Add multiple items to seal a new block."""
#
# docs = list(map(Document, items))
# for d in docs:
# self.store[d.hid] = d
#
# docs_id = list(map(lambda d: d.hid, docs))
# Chain.multi_add(self, docs_id)
#
# def get(self, block_index, item_index, evidence=None):
# """Get a sealed item, and optionally a bundle of evidence."""
#
# ## Get Doc and check its hash
# item = Chain.get(self, block_index, item_index, evidence)
# d = self.store[item]
# check_hash(item, d)
#
# if evidence != None:
# evidence[d.hid] = d
#
# return self.store[item].item
#
# def check(self, root, block_index, item_index, item):
# """Check that an item is within the structure at a specific point."""
# ret = True
# ret = ret and (self.root() == root)
# ret = ret and (self.get(block_index, item_index) == item)
# return ret
#
# class Document:
# __slots__ = ["item", "hid"]
#
# def __init__(self, item):
# self.item = item
# """ The item stored in the Leaf. """
#
# self.hid = binary_hash(packb(("D", self.item)))
#
# def identity(self):
# """ Returns the hash ID of the Leaf. """
# return self.hid
#
# def __eq__(self, other):
# return self.hid == other.hid
#
# class Block:
# def __init__(self, items, index=0, fingers=None, aux=None):
# """Initialize a block."""
# self.items = deepcopy(items)
# self.index = index
# self.fingers = deepcopy(fingers) if fingers else []
# self.aux = deepcopy(aux)
#
# def hash(self):
# """Return the head of the block."""
# return binary_hash(packb(
# ("S", self.index, self.fingers, sort_dicts(self.items), self.aux)))
#
# @property
# def hid(self):
# return self.hash()
#
# def next_block(self, store, items, pre_commit_fn=None):
# """Build a subsequent block, sealing a list of transactions.
#
# :param store: Backend
# :param items: Block items
# :param pre_commit_fn: Function that gets called on the block before
# it gets committed to the chain.
# """
# new_index = self.index + 1
# new_fingers = [(self.index, self.hid)]
#
# finger_index = get_fingers(new_index)
# new_fingers += [f for f in self.fingers if f[0] in finger_index]
#
# new_b = Block(items, new_index, new_fingers)
#
# if pre_commit_fn is not None:
# pre_commit_fn(new_b)
#
# store[new_b.hid] = new_b
# return new_b
#
# def get_item(self, store, block_index, item_index, evidence=None):
# """Return an item from the chain at a specific block and item index.
#
# :param store: Backend
# :param block_index: Block index
# :param item_index: Item index
# :param evidence: If not None, return a bundle of evidence
# """
# # print "FIND: %s (%s, %s)" % (self.index, block_index, item_index)
#
# if not (0 <= block_index <= self.index):
# raise Exception("Block is beyond this chain head: must be 0 <= %s <= %s." % (block_index, self.index))
#
# if evidence != None:
# evidence[self.hid] = self
#
# if block_index == self.index:
# if not (0 <= item_index < len(self.items)):
# raise Exception("Item is beyond this Block: must be 0 <= %s <= %s." % (item_index, len(self.items)))
#
# return self.items[item_index]
#
# _, target_h = [(f,block_hash) for (f, block_hash) in self.fingers if f >= block_index][-1]
#
# # Get the target block and check its integrity
# target_block = store[target_h]
# check_hash(target_h, target_block)
#
# return target_block.get_item(store, block_index, item_index, evidence)
#
# def __eq__(self, other):
# return self.hid == other.hid
. Output only the next line. | obj = Block(items=o[b"items"], index=o[b"index"], fingers=o[b"fingers"], aux=o[b"aux"]) |
Here is a snippet: <|code_start|> config_data = config_data.setdefault(prefix, {})
if key not in CONFIGURATION_KEYS:
logger.warning('"%s" is not a known configuration key', key)
if key in DEPRECATED.keys():
message = "{} is deprecated: {}".format(key, DEPRECATED[key])
logger.warning(message)
config_data[key] = type_(value)
def recursive_remove(config_data, key):
while '.' in key:
if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
<|code_end|>
. Write the next line using the current file imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
, which may include functions, classes, or code. Output only the next line. | config_files = load_file_configs(SEARCH_PATH) |
Given the code snippet: <|code_start|> if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
config_files = load_file_configs(SEARCH_PATH)
for path in config_files:
logger.info('==> %s <==', path)
logger.info(yaml_dump(config_files[path]))
return
if args.get:
if args.get in config:
logger.info(config[args.get])
else:
logger.info("The value of '%s' is not set." % args.get)
return
if args.files:
<|code_end|>
, generate the next line using the imports in this file:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (functions, classes, or occasionally code) from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | logger.info('User Config: %s' % USER_CONFIG) |
Based on the snippet: <|code_start|> return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
config_files = load_file_configs(SEARCH_PATH)
for path in config_files:
logger.info('==> %s <==', path)
logger.info(yaml_dump(config_files[path]))
return
if args.get:
if args.get in config:
logger.info(config[args.get])
else:
logger.info("The value of '%s' is not set." % args.get)
return
if args.files:
logger.info('User Config: %s' % USER_CONFIG)
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | logger.info('System Config: %s' % SYSTEM_CONFIG) |
Using the snippet: <|code_start|> * `ssl_verify`: Perform ssl validation on the https requests.
ssl_verify may be `True`, `False` or a path to a root CA pem file.
###### Toggle auto_register when doing anaconda upload
The default is yes, automatically create a new package when uploading.
If no, then an upload will fail if the package name does not already exist on the server.
anaconda config --set auto_register yes|no
'''
from __future__ import print_function
logger = logging.getLogger('binstar.config')
DEPRECATED = {
'verify_ssl': 'Please use ssl_verify instead'
}
def recursive_set(config_data, key, value, type_):
while '.' in key:
prefix, key = key.split('.', 1)
config_data = config_data.setdefault(prefix, {})
<|code_end|>
, determine the next line of code. You have imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (class names, function names, or code) available:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | if key not in CONFIGURATION_KEYS: |
Based on the snippet: <|code_start|> 'verify_ssl': 'Please use ssl_verify instead'
}
def recursive_set(config_data, key, value, type_):
while '.' in key:
prefix, key = key.split('.', 1)
config_data = config_data.setdefault(prefix, {})
if key not in CONFIGURATION_KEYS:
logger.warning('"%s" is not a known configuration key', key)
if key in DEPRECATED.keys():
message = "{} is deprecated: {}".format(key, DEPRECATED[key])
logger.warning(message)
config_data[key] = type_(value)
def recursive_remove(config_data, key):
while '.' in key:
if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | config = get_config() |
Using the snippet: <|code_start|> return
if args.get:
if args.get in config:
logger.info(config[args.get])
else:
logger.info("The value of '%s' is not set." % args.get)
return
if args.files:
logger.info('User Config: %s' % USER_CONFIG)
logger.info('System Config: %s' % SYSTEM_CONFIG)
return
config_file = USER_CONFIG if args.user else SYSTEM_CONFIG
config = load_config(config_file)
for key, value in args.set:
recursive_set(config, key, value, args.type)
for key in args.remove:
try:
recursive_remove(config, key)
except KeyError:
logger.error("Key %s does not exist" % key)
if not (args.set or args.remove):
raise ShowHelp()
<|code_end|>
, determine the next line of code. You have imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (class names, function names, or code) available:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | save_config(config, config_file) |
Here is a snippet: <|code_start|>
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
config_files = load_file_configs(SEARCH_PATH)
for path in config_files:
logger.info('==> %s <==', path)
logger.info(yaml_dump(config_files[path]))
return
if args.get:
if args.get in config:
logger.info(config[args.get])
else:
logger.info("The value of '%s' is not set." % args.get)
return
if args.files:
logger.info('User Config: %s' % USER_CONFIG)
logger.info('System Config: %s' % SYSTEM_CONFIG)
return
config_file = USER_CONFIG if args.user else SYSTEM_CONFIG
<|code_end|>
. Write the next line using the current file imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
, which may include functions, classes, or code. Output only the next line. | config = load_config(config_file) |
Continue the code snippet: <|code_start|> config_data = config_data.setdefault(prefix, {})
if key not in CONFIGURATION_KEYS:
logger.warning('"%s" is not a known configuration key', key)
if key in DEPRECATED.keys():
message = "{} is deprecated: {}".format(key, DEPRECATED[key])
logger.warning(message)
config_data[key] = type_(value)
def recursive_remove(config_data, key):
while '.' in key:
if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
logger.info(yaml_dump(config))
return
if args.show_sources:
<|code_end|>
. Use current file imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (classes, functions, or code) from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | config_files = load_file_configs(SEARCH_PATH) |
Here is a snippet: <|code_start|>
def recursive_set(config_data, key, value, type_):
while '.' in key:
prefix, key = key.split('.', 1)
config_data = config_data.setdefault(prefix, {})
if key not in CONFIGURATION_KEYS:
logger.warning('"%s" is not a known configuration key', key)
if key in DEPRECATED.keys():
message = "{} is deprecated: {}".format(key, DEPRECATED[key])
logger.warning(message)
config_data[key] = type_(value)
def recursive_remove(config_data, key):
while '.' in key:
if not config_data:
return
prefix, key = key.split('.', 1)
config_data = config_data.get(prefix, {})
del config_data[key]
def main(args):
config = get_config()
if args.show:
<|code_end|>
. Write the next line using the current file imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
, which may include functions, classes, or code. Output only the next line. | logger.info(yaml_dump(config)) |
Based on the snippet: <|code_start|> logger.info('System Config: %s' % SYSTEM_CONFIG)
return
config_file = USER_CONFIG if args.user else SYSTEM_CONFIG
config = load_config(config_file)
for key, value in args.set:
recursive_set(config, key, value, args.type)
for key in args.remove:
try:
recursive_remove(config, key)
except KeyError:
logger.error("Key %s does not exist" % key)
if not (args.set or args.remove):
raise ShowHelp()
save_config(config, config_file)
def add_parser(subparsers):
description = 'Anaconda client configuration'
parser = subparsers.add_parser('config',
help=description,
description=description,
epilog=__doc__,
formatter_class=RawDescriptionHelpFormatter)
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
from argparse import RawDescriptionHelpFormatter
from six import text_type
from binstar_client.errors import ShowHelp
from binstar_client.utils.config import (SEARCH_PATH, USER_CONFIG, SYSTEM_CONFIG, CONFIGURATION_KEYS,
get_config, save_config, load_config, load_file_configs)
from ..utils.yaml import yaml_dump, safe_load
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/utils/config.py
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
#
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
#
# SYSTEM_CONFIG = SITE_CONFIG
#
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
#
# def get_config(site=None):
# config = DEFAULT_CONFIG.copy()
#
# file_configs = load_file_configs(SEARCH_PATH)
# for fn in file_configs:
# recursive_update(config, file_configs[fn])
#
# site = site or config.get('default_site')
# sites = config.get('sites', {})
#
# if site:
# site = str(site)
#
# if site not in sites:
# logger.warning('Site alias "%s" does not exist in the config file', site)
# else:
# # This takes whatever keys are set for the site into the top level of the config dict
# recursive_update(config, sites.get(site, {}))
#
# return config
#
# def save_config(data, config_file):
# data_dir = dirname(config_file)
#
# try:
# if not exists(data_dir):
# os.makedirs(data_dir)
#
# with open(config_file, 'w') as fd:
# yaml_dump(data, stream=fd)
# except EnvironmentError as exc:
# raise BinstarError('%s: %s' % (exc.filename, exc.strerror,))
#
# def load_config(config_file):
# if exists(config_file):
# with open(config_file) as fd:
# data = yaml_load(fd)
# if data:
# return data
#
# return {}
#
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# assert fullpath.endswith(".yml") or fullpath.endswith(".yaml") or fullpath.endswith("anacondarc"), fullpath
# yield fullpath, load_config(fullpath)
#
# def _dir_yaml_loader(fullpath):
# for filename in os.listdir(fullpath):
# if filename.endswith(".yml") or filename.endswith(".yaml"):
# filepath = join(fullpath, filename)
# yield filepath, load_config(filepath)
#
# # map a stat result to a file loader or a directory loader
# _loader = {
# stat.S_IFREG: _file_yaml_loader,
# stat.S_IFDIR: _dir_yaml_loader,
# }
#
# def _get_st_mode(path):
# # stat the path for file type, or None if path doesn't exist
# try:
# return stat.S_IFMT(os.stat(path).st_mode)
# except OSError:
# return None
#
# expanded_paths = [expand(path) for path in search_path]
# stat_paths = (_get_st_mode(path) for path in expanded_paths)
# load_paths = (_loader[st_mode](path)
# for path, st_mode in zip(expanded_paths, stat_paths)
# if st_mode is not None)
# raw_data = collections.OrderedDict(kv for kv in itertools.chain.from_iterable(load_paths))
#
# return raw_data
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# def yaml_dump(data, stream=None):
. Output only the next line. | parser.add_argument('--type', default=safe_load, |
Predict the next line after this snippet: <|code_start|> # Copyright (c) 2012-2014 Continuum Analytics, Inc.
# TODO: it would be great if the installers had a unique identifier in the header
# Made by CAS installer
if "CAS-INSTALLER" in cio_copyright:
return True
# miniconda installer
elif "Copyright" not in cio_copyright:
return False
elif "Continuum Analytics, Inc." not in cio_copyright:
return False
return True
return False
def inspect_package(filename, fileobj, *args, **kwarg):
# skip #!/bin/bash
line = fileobj.readline()
lines = []
while line.startswith('#'):
if ':' in line:
lines.append(line.strip(" #\n"))
line = fileobj.readline()
try:
<|code_end|>
using the current file's imports:
import logging
from os import path
from ..utils.yaml import yaml_load
and any relevant context from other files:
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
. Output only the next line. | installer_data = yaml_load("\n".join(lines)) |
Based on the snippet: <|code_start|> {u'name': u'pytz', u'specs': []},
{u'name': u'pyyaml', u'specs': []},
{u'name': u'requests',
u'specs': [(u'>=', u'2.0'),
(u'<=', u'3.0')]}],
u'environments': [{u'depends': [{u'name': u'argparse',
u'specs': []}],
u'name': u'python_version=="2.6"'}],
u'extras': [{u'depends': [{u'name': u'reportlab',
u'specs': [(u'>=', u'1.2')]},
{u'name': u'rxp',
u'specs': []},
],
u'name': u'PDF'},
{u'depends': [{u'name': u'docutils',
u'specs': [(u'>=', u'0.3')]}],
u'name': u'reST'}],
u'has_dep_errors': False}
expected_egg_file_data = {'attrs': {'packagetype': 'bdist_egg', 'python_version': 'source'},
'basename': 'test_package34-0.3.1-py2.7.egg',
'dependencies': expected_dependencies,
'platform': None}
class Test(unittest.TestCase):
maxDiff = None
def test_sdist(self):
filename = data_dir('test_package34-0.3.1.tar.gz')
with open(filename, 'rb') as fd:
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import shutil
import tempfile
import unittest
from pprint import pprint
from binstar_client.inspect_package import pypi
from binstar_client.utils.test.utils import data_dir
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/inspect_package/pypi.py
# PARTIAL_PYPI_SPEC_PATTERN = re.compile(r'''
# # Text needs to be stripped and all extra spaces replaced by single spaces
# (?P<name>^[A-Z0-9][A-Z0-9._-]*)?
# \s?
# (\[(?P<extras>.*)\])?
# \s?
# (?P<constraints>\(? \s? ([\w\d<>=!~,\s\.\*]*) \s? \)? )?
# \s?
# ''', re.VERBOSE | re.IGNORECASE)
# def norm_package_name(name):
# def norm_package_version(version):
# def split_spec(spec, sep):
# def parse_specification(spec):
# def get_header_description(filedata):
# def python_version_check(filedata):
# def parse_requirement(line, deps, extras, extra):
# def parse_requires_txt(requires_txt):
# def format_requirements(requires):
# def format_run_requires_metadata(run_requires):
# def format_requires_metadata(run_requires):
# def format_sdist_header_metadata(data, filename):
# def format_wheel_json_metadata(data, filename, zipfile):
# def inspect_pypi_package_whl(filename, fileobj):
# def disutils_dependencies(config_items):
# def inspect_pypi_package_sdist(filename, fileobj):
# def inspect_pypi_package_egg(filename, fileobj):
# def inspect_pypi_package_zip(filename, fileobj):
# def inspect_pypi_package_exe(filename, fileobj):
# def inspect_pypi_package_rpm(filename, fileobj):
# def inspect_pypi_package(filename, fileobj, *args, **kwargs):
# def main():
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | package_data, version_data, file_data = pypi.inspect_pypi_package(filename, fd) |
Given the code snippet: <|code_start|>
expected_whl_dependencies = {u'depends': [{u'name': u'python-dateutil', u'specs': []},
{u'name': u'pytz', u'specs': []},
{u'name': u'pyyaml', u'specs': []},
{u'name': u'requests',
u'specs': [(u'>=', u'2.0'),
(u'<=', u'3.0')]}],
u'environments': [{u'depends': [{u'name': u'argparse',
u'specs': []}],
u'name': u'python_version=="2.6"'}],
u'extras': [{u'depends': [{u'name': u'reportlab',
u'specs': [(u'>=', u'1.2')]},
{u'name': u'rxp',
u'specs': []},
],
u'name': u'PDF'},
{u'depends': [{u'name': u'docutils',
u'specs': [(u'>=', u'0.3')]}],
u'name': u'reST'}],
u'has_dep_errors': False}
expected_egg_file_data = {'attrs': {'packagetype': 'bdist_egg', 'python_version': 'source'},
'basename': 'test_package34-0.3.1-py2.7.egg',
'dependencies': expected_dependencies,
'platform': None}
class Test(unittest.TestCase):
maxDiff = None
def test_sdist(self):
<|code_end|>
, generate the next line using the imports in this file:
import os
import shutil
import tempfile
import unittest
from pprint import pprint
from binstar_client.inspect_package import pypi
from binstar_client.utils.test.utils import data_dir
and context (functions, classes, or occasionally code) from other files:
# Path: binstar_client/inspect_package/pypi.py
# PARTIAL_PYPI_SPEC_PATTERN = re.compile(r'''
# # Text needs to be stripped and all extra spaces replaced by single spaces
# (?P<name>^[A-Z0-9][A-Z0-9._-]*)?
# \s?
# (\[(?P<extras>.*)\])?
# \s?
# (?P<constraints>\(? \s? ([\w\d<>=!~,\s\.\*]*) \s? \)? )?
# \s?
# ''', re.VERBOSE | re.IGNORECASE)
# def norm_package_name(name):
# def norm_package_version(version):
# def split_spec(spec, sep):
# def parse_specification(spec):
# def get_header_description(filedata):
# def python_version_check(filedata):
# def parse_requirement(line, deps, extras, extra):
# def parse_requires_txt(requires_txt):
# def format_requirements(requires):
# def format_run_requires_metadata(run_requires):
# def format_requires_metadata(run_requires):
# def format_sdist_header_metadata(data, filename):
# def format_wheel_json_metadata(data, filename, zipfile):
# def inspect_pypi_package_whl(filename, fileobj):
# def disutils_dependencies(config_items):
# def inspect_pypi_package_sdist(filename, fileobj):
# def inspect_pypi_package_egg(filename, fileobj):
# def inspect_pypi_package_zip(filename, fileobj):
# def inspect_pypi_package_exe(filename, fileobj):
# def inspect_pypi_package_rpm(filename, fileobj):
# def inspect_pypi_package(filename, fileobj, *args, **kwargs):
# def main():
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | filename = data_dir('test_package34-0.3.1.tar.gz') |
Next line prediction: <|code_start|>
class DataURIConverterTestCase(unittest.TestCase):
def test_local_image(self):
location = data_dir('bokeh-logo.png')
<|code_end|>
. Use current file imports:
(import unittest
from binstar_client.utils.notebook.data_uri import DataURIConverter
from binstar_client.utils.test.utils import data_dir)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/utils/notebook/data_uri.py
# class DataURIConverter(object):
# def __init__(self, location):
# self.check_pillow_installed()
# self.location = location
#
# def check_pillow_installed(self):
# if Image is None:
# raise PillowNotInstalled()
#
# def __call__(self):
# if os.path.exists(self.location):
# with open(self.location, "rb") as fp:
# return self._encode(self.resize_and_convert(fp).read())
# elif self.is_url():
# content = requests.get(self.location).content
# fp = io.BytesIO()
# fp.write(content)
# fp.seek(0)
# return self._encode(self.resize_and_convert(fp).read())
# else:
# raise IOError("{} not found".format(self.location))
#
# def resize_and_convert(self, fp):
# im = Image.open(fp)
# im.thumbnail(THUMB_SIZE)
# out = io.BytesIO()
# im.save(out, format='png')
# out.seek(0)
# return out
#
# def is_py3(self):
# return sys.version_info[0] == 3
#
# def is_url(self):
# return self.location is not None and urlparse(self.location).scheme in ['http', 'https']
#
# def _encode(self, content):
# if self.is_py3():
# data64 = base64.b64encode(content).decode("ascii")
# else:
# data64 = content.encode('base64').replace("\n", "")
# return data64
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | output = DataURIConverter(location)() |
Using the snippet: <|code_start|> 'description': 'test description',
'dev_url': 'https://dev.url',
'doc_url': 'https://doc.url',
'home': 'http://home.page',
'source_git_url': 'http://git.url',
}
app_expected_version_data = {
'home': 'http://home.page',
'description': 'test description',
'summary': u'',
'dev_url': 'https://dev.url',
'doc_url': 'https://doc.url',
'source_git_url': 'http://git.url',
'license': 'LICENSE',
'license_url': 'http://license.url',
'license_family': None,
'source_git_tag': 0.1,
'description': 'test description',
'home_page': 'http://home.page',
'icon': ICON_B64,
'version': u'0.1',
}
class Test(unittest.TestCase):
def test_conda_old(self):
filename = data_dir('conda_gc_test-1.2.1-py27_3.tar.bz2')
with open(filename, 'rb') as fd:
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from binstar_client.inspect_package import conda
from binstar_client.utils.notebook.data_uri import data_uri_from
from binstar_client.utils.test.utils import data_dir
and context (class names, function names, or code) available:
# Path: binstar_client/inspect_package/conda.py
# def transform_conda_deps(deps):
# def get_subdir(index):
# def inspect_conda_package(filename, fileobj, *args, **kwargs):
# def main():
#
# Path: binstar_client/utils/notebook/data_uri.py
# def data_uri_from(location):
# return DataURIConverter(location)()
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | package_data, version_data, file_data = conda.inspect_conda_package(filename, fd) |
Given the code snippet: <|code_start|> 'basename': 'osx-64/conda_gc_test-1.2.1-py27_3.tar.bz2',
'dependencies': {
'depends': [{'name': 'foo', 'specs': [['==', '3']]},
{'name': 'python', 'specs': [['==', '2.7.8']]}],
},
}
expected_file_data_221 = {
'attrs': {
'arch': 'x86_64',
'build': 'py27_3',
'build_number': 3,
'depends': ['foo ==3*', 'python ==2.7.8'],
#'license': None,
'machine': 'x86_64',
'operatingsystem': 'linux',
'platform': 'linux',
'subdir': 'linux-64',
'target-triplet': 'x86_64-any-linux',
'has_prefix': False,
},
'basename': 'linux-64/conda_gc_test-2.2.1-py27_3.tar.bz2',
'dependencies': {
'depends': [{'name': 'foo', 'specs': [['==', '3']]},
{'name': 'python', 'specs': [['==', '2.7.8']]}],
},
}
# Test package application data
# -----------------------------------------------------------------------------
<|code_end|>
, generate the next line using the imports in this file:
import unittest
from binstar_client.inspect_package import conda
from binstar_client.utils.notebook.data_uri import data_uri_from
from binstar_client.utils.test.utils import data_dir
and context (functions, classes, or occasionally code) from other files:
# Path: binstar_client/inspect_package/conda.py
# def transform_conda_deps(deps):
# def get_subdir(index):
# def inspect_conda_package(filename, fileobj, *args, **kwargs):
# def main():
#
# Path: binstar_client/utils/notebook/data_uri.py
# def data_uri_from(location):
# return DataURIConverter(location)()
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | ICON_B64 = data_uri_from(data_dir('43c9b994a4d96f779dad87219d645c9f.png')) |
Given the code snippet: <|code_start|> 'basename': 'osx-64/conda_gc_test-1.2.1-py27_3.tar.bz2',
'dependencies': {
'depends': [{'name': 'foo', 'specs': [['==', '3']]},
{'name': 'python', 'specs': [['==', '2.7.8']]}],
},
}
expected_file_data_221 = {
'attrs': {
'arch': 'x86_64',
'build': 'py27_3',
'build_number': 3,
'depends': ['foo ==3*', 'python ==2.7.8'],
#'license': None,
'machine': 'x86_64',
'operatingsystem': 'linux',
'platform': 'linux',
'subdir': 'linux-64',
'target-triplet': 'x86_64-any-linux',
'has_prefix': False,
},
'basename': 'linux-64/conda_gc_test-2.2.1-py27_3.tar.bz2',
'dependencies': {
'depends': [{'name': 'foo', 'specs': [['==', '3']]},
{'name': 'python', 'specs': [['==', '2.7.8']]}],
},
}
# Test package application data
# -----------------------------------------------------------------------------
<|code_end|>
, generate the next line using the imports in this file:
import unittest
from binstar_client.inspect_package import conda
from binstar_client.utils.notebook.data_uri import data_uri_from
from binstar_client.utils.test.utils import data_dir
and context (functions, classes, or occasionally code) from other files:
# Path: binstar_client/inspect_package/conda.py
# def transform_conda_deps(deps):
# def get_subdir(index):
# def inspect_conda_package(filename, fileobj, *args, **kwargs):
# def main():
#
# Path: binstar_client/utils/notebook/data_uri.py
# def data_uri_from(location):
# return DataURIConverter(location)()
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | ICON_B64 = data_uri_from(data_dir('43c9b994a4d96f779dad87219d645c9f.png')) |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
"""Test anaconda-client configuration set/get."""
# Standard library imports
# Third party imports
# Local imports
class Test(unittest.TestCase):
def create_config_dirs(self):
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
system_dir = join(tmpdir, 'system')
user_dir = join(tmpdir, 'user')
os.mkdir(system_dir)
os.mkdir(user_dir)
return user_dir, system_dir
def test_defaults(self):
user_dir, system_dir = self.create_config_dirs()
with open(join(user_dir, 'config.yaml'), 'wb') as fd:
fd.write(b'')
with mock.patch('binstar_client.utils.config.SEARCH_PATH',
[system_dir, user_dir]):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from os.path import join
from binstar_client.utils import config
import os
import shutil
import tempfile
import unittest
import mock
and context:
# Path: binstar_client/utils/config.py
# def expandvars(path):
# def expand(path):
# def recursive_update(config, update_dict):
# def get_server_api(token=None, site=None, cls=None, config=None, **kwargs):
# def get_binstar(args=None, cls=None):
# def store_token(token, args):
# def load_token(url):
# def remove_token(args):
# def load_config(config_file):
# def load_file_configs(search_path):
# def _file_yaml_loader(fullpath):
# def _dir_yaml_loader(fullpath):
# def _get_st_mode(path):
# def get_config(site=None):
# def save_config(data, config_file):
# def set_config(data, user=True):
# USER_CONFIG = join(dirs.user_data_dir, 'config.yaml')
# USER_CONFIG = expand('~/.continuum/anaconda-client/config.yaml')
# PACKAGE_TYPES = {
# 'env': 'Environment',
# 'ipynb': 'Notebook',
# 'conda' : 'Conda Package',
# 'pypi': 'Python Package',
# }
# USER_LOGDIR = dirs.user_log_dir
# SITE_CONFIG = expand('$CONDA_ROOT/etc/anaconda-client/config.yaml')
# SYSTEM_CONFIG = SITE_CONFIG
# DEFAULT_URL = 'https://api.anaconda.org'
# DEFAULT_CONFIG = {
# 'sites': {
# 'anaconda': {'url': DEFAULT_URL},
# 'binstar': {'url': DEFAULT_URL},
# },
# 'auto_register': True,
# 'default_site': None,
# 'url': DEFAULT_URL,
# 'ssl_verify': True
# }
# CONFIGURATION_KEYS = [
# 'auto_register',
# 'default_site',
# 'upload_user',
# 'sites',
# 'url',
# 'verify_ssl',
# 'ssl_verify',
# ]
# SEARCH_PATH = (
# dirs.site_data_dir,
# '/etc/anaconda-client/',
# '$CONDA_ROOT/etc/anaconda-client/',
# dirs.user_data_dir,
# '~/.continuum/anaconda-client/',
# '$CONDA_PREFIX/etc/anaconda-client/',
# )
# TOKEN_DIRS = [
# dirs.user_data_dir,
# join(dirname(USER_CONFIG), 'tokens'),
# ]
# TOKEN_DIR = TOKEN_DIRS[-1]
which might include code, classes, or functions. Output only the next line. | cfg = config.get_config() |
Predict the next line for this snippet: <|code_start|>
class EnvInspectorTestCase(unittest.TestCase):
def test_package_name(self):
with open(data_dir('environment.yml')) as fileobj:
<|code_end|>
with the help of current file imports:
import unittest
from ..env import EnvInspector, inspect_env_package
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/inspect_package/env.py
# class EnvInspector(object):
# def __init__(self, filename, fileobj):
# self._name = None
# self._version = None
# self.filename = filename
# self.content = yaml_load(fileobj)
#
# @property
# def basename(self):
# return os.path.basename(self.filename)
#
# @property
# def name(self):
# if self._name is None:
# self._name = self.content['name']
# return self._name
#
# def get_package_data(self):
# return {
# 'name': self.name,
# 'summary': "Environment file"
# }
#
# @property
# def version(self):
# if self._version is None:
# self._version = time.strftime('%Y.%m.%d.%H%M%S')
#
# return self._version
#
# def inspect_env_package(filename, fileobj, *args, **kwargs):
# environment = EnvInspector(filename, fileobj)
#
# package_data = environment.get_package_data()
# release_data = {
# 'version': environment.version,
# 'description': ''
# }
# file_data = {
# 'basename': environment.basename,
# 'attrs': {}
# }
#
# return package_data, release_data, file_data
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may contain function names, class names, or code. Output only the next line. | assert EnvInspector('environment.yml', fileobj).name == 'stats' |
Given the following code snippet before the placeholder: <|code_start|>
class EnvInspectorTestCase(unittest.TestCase):
def test_package_name(self):
with open(data_dir('environment.yml')) as fileobj:
assert EnvInspector('environment.yml', fileobj).name == 'stats'
def test_version(self):
with open(data_dir('environment.yml')) as fileobj:
self.assertIsInstance(EnvInspector('environment.yml', fileobj).version, str)
class InspectEnvironmentPackageTest(unittest.TestCase):
def test_inspect_env_package(self):
with open(data_dir('environment.yml')) as fileobj:
<|code_end|>
, predict the next line using imports from the current file:
import unittest
from ..env import EnvInspector, inspect_env_package
from binstar_client.utils.test.utils import data_dir
and context including class names, function names, and sometimes code from other files:
# Path: binstar_client/inspect_package/env.py
# class EnvInspector(object):
# def __init__(self, filename, fileobj):
# self._name = None
# self._version = None
# self.filename = filename
# self.content = yaml_load(fileobj)
#
# @property
# def basename(self):
# return os.path.basename(self.filename)
#
# @property
# def name(self):
# if self._name is None:
# self._name = self.content['name']
# return self._name
#
# def get_package_data(self):
# return {
# 'name': self.name,
# 'summary': "Environment file"
# }
#
# @property
# def version(self):
# if self._version is None:
# self._version = time.strftime('%Y.%m.%d.%H%M%S')
#
# return self._version
#
# def inspect_env_package(filename, fileobj, *args, **kwargs):
# environment = EnvInspector(filename, fileobj)
#
# package_data = environment.get_package_data()
# release_data = {
# 'version': environment.version,
# 'description': ''
# }
# file_data = {
# 'basename': environment.basename,
# 'attrs': {}
# }
#
# return package_data, release_data, file_data
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | package_data, release_data, file_data = inspect_env_package( |
Here is a snippet: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
@unittest.skip("Need to change this to binsar package --create")
class Test(CLITestCase):
@urlpatch
def test_register_public(self, registry):
r1 = registry.register(method='GET', path='/user', content='{"login": "eggs"}')
r2 = registry.register(method='GET', path='/package/eggs/foo', status=404)
r3 = registry.register(method='POST', path='/package/eggs/foo', status=200, content='{"login": "eggs"}')
<|code_end|>
. Write the next line using the current file imports:
import base64
import json
import unittest
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may include functions, classes, or code. Output only the next line. | main(['--show-traceback', 'register', data_dir('foo-0.1-0.tar.bz2')], False) |
Next line prediction: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
@unittest.skip("Need to change this to binsar package --create")
<|code_end|>
. Use current file imports:
(import base64
import json
import unittest
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | class Test(CLITestCase): |
Based on the snippet: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
@unittest.skip("Need to change this to binsar package --create")
class Test(CLITestCase):
<|code_end|>
, predict the immediate next line with the help of imports:
import base64
import json
import unittest
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | @urlpatch |
Continue the code snippet: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
@unittest.skip("Need to change this to binsar package --create")
class Test(CLITestCase):
@urlpatch
def test_register_public(self, registry):
r1 = registry.register(method='GET', path='/user', content='{"login": "eggs"}')
r2 = registry.register(method='GET', path='/package/eggs/foo', status=404)
r3 = registry.register(method='POST', path='/package/eggs/foo', status=200, content='{"login": "eggs"}')
<|code_end|>
. Use current file imports:
import base64
import json
import unittest
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir
and context (classes, functions, or code) from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | main(['--show-traceback', 'register', data_dir('foo-0.1-0.tar.bz2')], False) |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
class TestMultiPart(unittest.TestCase):
def test_unicode_read(self):
body = io.BytesIO(u'Unicode™'.encode('utf-8'))
<|code_end|>
, determine the next line of code. You have imports:
import io
import unittest
from binstar_client import requests_ext
and context (class names, function names, or code) available:
# Path: binstar_client/requests_ext.py
# def encode_multipart_formdata_stream(fields, boundary=None):
# def body_write(item):
# def __init__(self, body, callback=None):
# def read(self, n= -1):
# def tell(self):
# def seek(self, pos, mode=0):
# def stream_multipart(data, files=None, callback=None):
# def __call__(self, r):
# class MultiPartIO(object):
# class NullAuth(requests.auth.AuthBase):
. Output only the next line. | multipart = requests_ext.MultiPartIO([body]) |
Next line prediction: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
class Test(CLITestCase):
@urlpatch
def test_whoami_anon(self, urls):
user = urls.register(method='GET', path='/user', status=401)
<|code_end|>
. Use current file imports:
(import json
import mock
import os
import unittest
import requests.utils
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | main(['--show-traceback', 'whoami'], False) |
Here is a snippet: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
<|code_end|>
. Write the next line using the current file imports:
import json
import mock
import os
import unittest
import requests.utils
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may include functions, classes, or code. Output only the next line. | class Test(CLITestCase): |
Next line prediction: <|code_start|>'''
Created on Feb 18, 2014
@author: sean
'''
from __future__ import unicode_literals
class Test(CLITestCase):
<|code_end|>
. Use current file imports:
(import json
import mock
import os
import unittest
import requests.utils
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | @urlpatch |
Given the code snippet: <|code_start|>
class Test(CLITestCase):
@urlpatch
def test_whoami_anon(self, urls):
user = urls.register(method='GET', path='/user', status=401)
main(['--show-traceback', 'whoami'], False)
self.assertIn('Anonymous User', self.stream.getvalue())
user.assertCalled()
@urlpatch
def test_whoami(self, urls):
content = json.dumps({'login': 'eggs', 'created_at':'1/2/2000'})
user = urls.register(method='GET', path='/user', content=content)
main(['--show-traceback', 'whoami'], False)
self.assertIn('eggs', self.stream.getvalue())
user.assertCalled()
@urlpatch
@mock.patch('os.path.expanduser')
def test_netrc_ignored(self, urls, expanduser):
# Disable token authentication
self.load_token.return_value = None
os.environ.pop('BINSTAR_API_TOKEN', None)
os.environ.pop('ANACONDA_API_TOKEN', None)
# requests.get_netrc_auth uses expanduser to find the netrc file, point to our
# test file
<|code_end|>
, generate the next line using the imports in this file:
import json
import mock
import os
import unittest
import requests.utils
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.utils.test.utils import data_dir
and context (functions, classes, or occasionally code) from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | expanduser.return_value = data_dir('netrc') |
Here is a snippet: <|code_start|> 'description': 'Contains all the code examples in the book "R for Dummies" (1st\n edition). '
'You can view the table of contents as well as the sample code for each\n chapter.',
'version': '0.1.2',
}
expected_file_data = {
'attrs': {
'NeedsCompilation': 'no',
'suggests': [
'fortunes',
'stringr',
'sos',
'XLConnect',
'reshape2',
'ggplot2',
'foreign',
'lattice',
],
'depends': [],
'type': 'source',
},
'basename': 'rfordummies_0.1.2.tar.gz',
}
class Test(unittest.TestCase):
maxDiff = None
def test_r(self):
filename = data_dir('rfordummies_0.1.2.tar.gz')
with open(filename, 'rb') as fd:
<|code_end|>
. Write the next line using the current file imports:
import unittest
from binstar_client.inspect_package import r
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/inspect_package/r.py
# def parse_package_list(package_spec):
# def inspect_r_package(filename, fileobj, *args, **kwargs):
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may include functions, classes, or code. Output only the next line. | package_data, version_data, file_data = r.inspect_r_package(filename, fd) |
Here is a snippet: <|code_start|>
expected_version_data = {
'description': 'Contains all the code examples in the book "R for Dummies" (1st\n edition). '
'You can view the table of contents as well as the sample code for each\n chapter.',
'version': '0.1.2',
}
expected_file_data = {
'attrs': {
'NeedsCompilation': 'no',
'suggests': [
'fortunes',
'stringr',
'sos',
'XLConnect',
'reshape2',
'ggplot2',
'foreign',
'lattice',
],
'depends': [],
'type': 'source',
},
'basename': 'rfordummies_0.1.2.tar.gz',
}
class Test(unittest.TestCase):
maxDiff = None
def test_r(self):
<|code_end|>
. Write the next line using the current file imports:
import unittest
from binstar_client.inspect_package import r
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/inspect_package/r.py
# def parse_package_list(package_spec):
# def inspect_r_package(filename, fileobj, *args, **kwargs):
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may include functions, classes, or code. Output only the next line. | filename = data_dir('rfordummies_0.1.2.tar.gz') |
Given the following code snippet before the placeholder: <|code_start|>
class EnvInspector(object):
def __init__(self, filename, fileobj):
self._name = None
self._version = None
self.filename = filename
<|code_end|>
, predict the next line using imports from the current file:
import os
import time
from ..utils.yaml import yaml_load
and context including class names, function names, and sometimes code from other files:
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
. Output only the next line. | self.content = yaml_load(fileobj) |
Next line prediction: <|code_start|>from __future__ import print_function, absolute_import, unicode_literals
try:
except ImportError:
logger = logging.getLogger('binstar')
def expandvars(path):
<|code_end|>
. Use current file imports:
(from os.path import exists, join, dirname, isfile, isdir, abspath, expanduser
from string import Template
from urllib import quote_plus
from urllib.parse import quote_plus
from binstar_client.utils.conda import CONDA_PREFIX, CONDA_ROOT
from binstar_client.utils.appdirs import AppDirs, EnvAppDirs
from binstar_client.errors import BinstarError
from .yaml import yaml_load, yaml_dump
from binstar_client import Binstar
import collections
import logging
import os
import stat
import warnings
import itertools)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/utils/conda.py
# CONDA_PREFIX = sys.prefix
#
# CONDA_ROOT = get_conda_root()
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
#
# def yaml_dump(data, stream=None):
# """Dumps an object to a YAML string"""
# return safe_dump(data, stream=stream, default_flow_style=False)
. Output only the next line. | environ = dict(CONDA_ROOT=CONDA_ROOT, CONDA_PREFIX=CONDA_PREFIX) |
Given snippet: <|code_start|>from __future__ import print_function, absolute_import, unicode_literals
try:
except ImportError:
logger = logging.getLogger('binstar')
def expandvars(path):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from os.path import exists, join, dirname, isfile, isdir, abspath, expanduser
from string import Template
from urllib import quote_plus
from urllib.parse import quote_plus
from binstar_client.utils.conda import CONDA_PREFIX, CONDA_ROOT
from binstar_client.utils.appdirs import AppDirs, EnvAppDirs
from binstar_client.errors import BinstarError
from .yaml import yaml_load, yaml_dump
from binstar_client import Binstar
import collections
import logging
import os
import stat
import warnings
import itertools
and context:
# Path: binstar_client/utils/conda.py
# CONDA_PREFIX = sys.prefix
#
# CONDA_ROOT = get_conda_root()
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
#
# def yaml_dump(data, stream=None):
# """Dumps an object to a YAML string"""
# return safe_dump(data, stream=stream, default_flow_style=False)
which might include code, classes, or functions. Output only the next line. | environ = dict(CONDA_ROOT=CONDA_ROOT, CONDA_PREFIX=CONDA_PREFIX) |
Given the following code snippet before the placeholder: <|code_start|>def load_token(url):
for token_dir in TOKEN_DIRS:
tokenfile = join(token_dir, '%s.token' % quote_plus(url))
if isfile(tokenfile):
logger.debug("Found login token: {}".format(tokenfile))
with open(tokenfile) as fd:
token = fd.read().strip()
if token:
return token
else:
logger.debug("Token file is empty: {}".format(tokenfile))
logger.debug("Removing file: {}".format(tokenfile))
os.unlink(tokenfile)
def remove_token(args):
config = get_config(site=args and args.site)
url = config.get('url', DEFAULT_URL)
for token_dir in TOKEN_DIRS:
tokenfile = join(token_dir, '%s.token' % quote_plus(url))
if isfile(tokenfile):
os.unlink(tokenfile)
def load_config(config_file):
if exists(config_file):
with open(config_file) as fd:
<|code_end|>
, predict the next line using imports from the current file:
from os.path import exists, join, dirname, isfile, isdir, abspath, expanduser
from string import Template
from urllib import quote_plus
from urllib.parse import quote_plus
from binstar_client.utils.conda import CONDA_PREFIX, CONDA_ROOT
from binstar_client.utils.appdirs import AppDirs, EnvAppDirs
from binstar_client.errors import BinstarError
from .yaml import yaml_load, yaml_dump
from binstar_client import Binstar
import collections
import logging
import os
import stat
import warnings
import itertools
and context including class names, function names, and sometimes code from other files:
# Path: binstar_client/utils/conda.py
# CONDA_PREFIX = sys.prefix
#
# CONDA_ROOT = get_conda_root()
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
#
# def yaml_dump(data, stream=None):
# """Dumps an object to a YAML string"""
# return safe_dump(data, stream=stream, default_flow_style=False)
. Output only the next line. | data = yaml_load(fd) |
Given snippet: <|code_start|>def get_config(site=None):
config = DEFAULT_CONFIG.copy()
file_configs = load_file_configs(SEARCH_PATH)
for fn in file_configs:
recursive_update(config, file_configs[fn])
site = site or config.get('default_site')
sites = config.get('sites', {})
if site:
site = str(site)
if site not in sites:
logger.warning('Site alias "%s" does not exist in the config file', site)
else:
# This takes whatever keys are set for the site into the top level of the config dict
recursive_update(config, sites.get(site, {}))
return config
def save_config(data, config_file):
data_dir = dirname(config_file)
try:
if not exists(data_dir):
os.makedirs(data_dir)
with open(config_file, 'w') as fd:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from os.path import exists, join, dirname, isfile, isdir, abspath, expanduser
from string import Template
from urllib import quote_plus
from urllib.parse import quote_plus
from binstar_client.utils.conda import CONDA_PREFIX, CONDA_ROOT
from binstar_client.utils.appdirs import AppDirs, EnvAppDirs
from binstar_client.errors import BinstarError
from .yaml import yaml_load, yaml_dump
from binstar_client import Binstar
import collections
import logging
import os
import stat
import warnings
import itertools
and context:
# Path: binstar_client/utils/conda.py
# CONDA_PREFIX = sys.prefix
#
# CONDA_ROOT = get_conda_root()
#
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
#
# def yaml_dump(data, stream=None):
# """Dumps an object to a YAML string"""
# return safe_dump(data, stream=stream, default_flow_style=False)
which might include code, classes, or functions. Output only the next line. | yaml_dump(data, stream=fd) |
Here is a snippet: <|code_start|>from __future__ import unicode_literals
class Test(CLITestCase):
def test_write_env(self):
tmpdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, tmpdir)
with mock.patch('binstar_client.commands.config.USER_CONFIG', join(tmpdir, 'config.yaml')), \
mock.patch('binstar_client.commands.config.SEARCH_PATH', [tmpdir]):
<|code_end|>
. Write the next line using the current file imports:
import os
import shutil
import tempfile
import mock
from os.path import join, exists
from operator import delitem
from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
and context from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
, which may include functions, classes, or code. Output only the next line. | main(['config', '--set', 'url', 'http://localhost:5000'], False) |
Using the snippet: <|code_start|> with open(self.doc_pfile.fullpath) as docfile:
metadata['readme'] = docfile.read()
return metadata
def has_doc(self):
def is_readme(basename, relativepath, fullpath):
return basename == relativepath and basename in self.valid_names
for pfile in self.pfiles:
if pfile.validate(is_readme):
self.doc_pfile = pfile
break
return self.doc_pfile is not None
class ConfigurationInspector(object):
valid_names = [
'project.yml',
'project.yaml'
]
def __init__(self, pfiles):
self.pfiles = pfiles
self.config_pfile = None
def update(self, metadata):
try:
if self.has_config():
with open(self.config_pfile.fullpath) as configfile:
<|code_end|>
, determine the next line of code. You have imports:
import logging
from ..yaml import yaml_load
and context (class names, function names, or code) available:
# Path: binstar_client/utils/yaml.py
# def yaml_load(stream):
# """Loads a dictionary from a stream"""
# return safe_load(stream)
. Output only the next line. | metadata['configuration'] = yaml_load(configfile) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
class InspectIPYNBPackageTest(unittest.TestCase):
def test_package_data(self):
with open(data_dir('notebook.ipynb')) as fd:
<|code_end|>
, predict the immediate next line with the help of imports:
import unittest
from collections import namedtuple
from freezegun import freeze_time
from binstar_client.inspect_package.ipynb import inspect_ipynb_package
from binstar_client.utils.test.utils import data_dir
and context (classes, functions, sometimes code) from other files:
# Path: binstar_client/inspect_package/ipynb.py
# def inspect_ipynb_package(filename, fileobj, *args, **kwargs):
# notebook = nbformat.read(fileobj, nbformat.NO_CONVERT)
# summary = notebook['metadata'].get('summary', 'Jupyter Notebook')
# description = notebook['metadata'].get('description', 'Jupyter Notebook')
#
# package_data = {
# 'name': re.sub('\-ipynb$', '', parameterize(os.path.basename(filename))),
# 'summary': summary,
# 'description': description,
# }
#
# if 'parser_args' in kwargs and kwargs['parser_args'].thumbnail:
# package_data['thumbnail'] = data_uri_from(kwargs['parser_args'].thumbnail)
#
# release_data = {
# 'version': datetime.now().strftime('%Y.%m.%d.%H%M'),
# 'summary': summary,
# 'description': description,
# }
#
# file_data = {
# 'basename': os.path.basename(filename),
# 'attrs': {}
# }
#
# return package_data, release_data, file_data
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
. Output only the next line. | package_data, _, _ = inspect_ipynb_package('notebook.ipynb', fd) |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
class InspectIPYNBPackageTest(unittest.TestCase):
def test_package_data(self):
<|code_end|>
with the help of current file imports:
import unittest
from collections import namedtuple
from freezegun import freeze_time
from binstar_client.inspect_package.ipynb import inspect_ipynb_package
from binstar_client.utils.test.utils import data_dir
and context from other files:
# Path: binstar_client/inspect_package/ipynb.py
# def inspect_ipynb_package(filename, fileobj, *args, **kwargs):
# notebook = nbformat.read(fileobj, nbformat.NO_CONVERT)
# summary = notebook['metadata'].get('summary', 'Jupyter Notebook')
# description = notebook['metadata'].get('description', 'Jupyter Notebook')
#
# package_data = {
# 'name': re.sub('\-ipynb$', '', parameterize(os.path.basename(filename))),
# 'summary': summary,
# 'description': description,
# }
#
# if 'parser_args' in kwargs and kwargs['parser_args'].thumbnail:
# package_data['thumbnail'] = data_uri_from(kwargs['parser_args'].thumbnail)
#
# release_data = {
# 'version': datetime.now().strftime('%Y.%m.%d.%H%M'),
# 'summary': summary,
# 'description': description,
# }
#
# file_data = {
# 'basename': os.path.basename(filename),
# 'attrs': {}
# }
#
# return package_data, release_data, file_data
#
# Path: binstar_client/utils/test/utils.py
# def data_dir(filename):
# base_path = inspect.stack()[1][1] # function caller path
# test_data = os.path.join(os.path.dirname(base_path), 'data')
# return os.path.join(test_data, filename)
, which may contain function names, class names, or code. Output only the next line. | with open(data_dir('notebook.ipynb')) as fd: |
Next line prediction: <|code_start|> # recipe.index is deprecated and only packages built with older
# versions of conda-build contain that file.
recipe = tar.extractfile(info)
recipe = json.loads(recipe.read().decode())
about = recipe.pop('about', {})
elif info.name == 'info/about.json':
# recipe.json is deprecated and only packages build with older
# versions of conda-build contain that file.
about = tar.extractfile(info)
about = json.loads(about.read().decode())
elif info.name == 'info/has_prefix':
has_prefix = True
if index is not None and about != {}:
break
else:
if index is None:
raise TypeError("info/index.json required in conda package")
# Load icon defined in the index.json and file exists inside info folder
fileobj.seek(0)
icon_b64 = None
icon_path = index.get('icon')
if icon_path:
tar = tarfile.open(filename, fileobj=fileobj, mode="r|bz2")
for info in tar:
if info.name == 'info/{0}'.format(icon_path):
icon_data = tar.extractfile(info).read()
f, temp_path = tempfile.mkstemp()
with open(temp_path, 'wb') as f:
f.write(icon_data)
<|code_end|>
. Use current file imports:
(from os import path
from pprint import pprint
from ..utils.notebook.data_uri import data_uri_from
import json
import re
import sys
import tarfile
import tempfile)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/utils/notebook/data_uri.py
# def data_uri_from(location):
# return DataURIConverter(location)()
. Output only the next line. | icon_b64 = data_uri_from(temp_path) |
Continue the code snippet: <|code_start|>from __future__ import unicode_literals
# Standard library imports
# Local imports
class Test(CLITestCase):
@urlpatch
def test_copy_label(self, urls):
urls.register(method='GET', path='/channels/u1', content='["dev"]')
copy = urls.register(
method='POST', path='/copy/package/u1/p1/1.0/', content='[{"basename": "copied-file_1.0.tgz"}]')
<|code_end|>
. Use current file imports:
import json
import unittest
from binstar_client.errors import Conflict
from binstar_client.scripts.cli import main
from binstar_client.tests.urlmock import urlpatch
from binstar_client.tests.fixture import CLITestCase
and context (classes, functions, or code) from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
. Output only the next line. | main(['--show-traceback', 'copy', '--from-label', 'dev', '--to-label', 'release/xyz', 'u1/p1/1.0'], False) |
Given snippet: <|code_start|>from __future__ import absolute_import, print_function, unicode_literals
def inspect_ipynb_package(filename, fileobj, *args, **kwargs):
notebook = nbformat.read(fileobj, nbformat.NO_CONVERT)
summary = notebook['metadata'].get('summary', 'Jupyter Notebook')
description = notebook['metadata'].get('description', 'Jupyter Notebook')
package_data = {
'name': re.sub('\-ipynb$', '', parameterize(os.path.basename(filename))),
'summary': summary,
'description': description,
}
if 'parser_args' in kwargs and kwargs['parser_args'].thumbnail:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import re
import time
import nbformat
from datetime import datetime
from ..utils.notebook.data_uri import data_uri_from
from ..utils.notebook.inflection import parameterize
and context:
# Path: binstar_client/utils/notebook/data_uri.py
# def data_uri_from(location):
# return DataURIConverter(location)()
which might include code, classes, or functions. Output only the next line. | package_data['thumbnail'] = data_uri_from(kwargs['parser_args'].thumbnail) |
Next line prediction: <|code_start|>from __future__ import unicode_literals
class Test(CLITestCase):
@urlpatch
def test_remove_token_from_org(self, urls):
remove_token = urls.register(
method='DELETE',
path='/authentications/org/orgname/name/tokenname',
content='{"token": "a-token"}',
status=201
)
<|code_end|>
. Use current file imports:
(from binstar_client.scripts.cli import main
from binstar_client.tests.fixture import CLITestCase
from binstar_client.tests.urlmock import urlpatch
from binstar_client.errors import BinstarError
from mock import patch
import unittest)
and context including class names, function names, or small code snippets from other files:
# Path: binstar_client/scripts/cli.py
# def main(args=None, exit=True):
# binstar_main(command_module, args, exit,
# description=__doc__, version=version)
#
# Path: binstar_client/tests/fixture.py
# class CLITestCase(unittest.TestCase):
# def setUp(self):
# self.get_config_patch = mock.patch('binstar_client.utils.get_config')
# self.get_config = self.get_config_patch.start()
# self.get_config.return_value = {}
#
# self.load_token_patch = mock.patch('binstar_client.utils.config.load_token')
# self.load_token = self.load_token_patch.start()
# self.load_token.return_value = '123'
#
# self.store_token_patch = mock.patch('binstar_client.utils.config.store_token')
# self.store_token = self.store_token_patch.start()
#
# self.setup_logging_patch = mock.patch('binstar_client.scripts.cli._setup_logging')
# self.setup_logging_patch.start()
#
# self.logger = logger = logging.getLogger('binstar')
# logger.setLevel(logging.INFO)
# self.stream = AnyIO()
# self.hndlr = hndlr = logging.StreamHandler(stream=self.stream)
# hndlr.setLevel(logging.INFO)
# logger.addHandler(hndlr)
#
# def tearDown(self):
# self.setup_logging_patch.stop()
# self.get_config_patch.stop()
# self.load_token_patch.stop()
# self.store_token_patch.stop()
#
# self.logger.removeHandler(self.hndlr)
#
# Path: binstar_client/tests/urlmock.py
# def urlpatch(func):
# @wraps(func)
# def inner(self, *args, **kwargs):
# with Registry() as r:
# return func(self, r, *args, **kwargs)
# return inner
. Output only the next line. | main(['--show-traceback', 'auth', '--remove', 'tokenname', '-o', 'orgname'], False) |
Predict the next line for this snippet: <|code_start|>'''
CERTitude: the seeker of IOC
Copyright (c) 2016 CERT-W
Contact: cert@wavestone.com
Contributors: @iansus, @nervous, @fschwebel
CERTitude is under licence GPL-2.0:
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
threadname = uuid.uuid4().hex[:6]
loggingserver = logging.getLogger('api')
def hashPassword(password):
s = sha256()
<|code_end|>
with the help of current file imports:
import logging
import uuid
from hashlib import sha256, sha512
from config import INTERFACE_HASH_SALT
and context from other files:
# Path: config.py
# INTERFACE_HASH_SALT = '' # nocommit
, which may contain function names, class names, or code. Output only the next line. | s.update(INTERFACE_HASH_SALT) |
Based on the snippet: <|code_start|> Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
parser = argparse.ArgumentParser(
description='CERTitude, the modular Python scanner, network mapper and IOC Seeker'
)
parser.add_argument(
'command',
type=str,
nargs=1,
help="command to run ('init', 'run')"
)
parser.add_argument(
'-c',
'--component',
type=str,
default='0',
nargs=1,
help="component to run ('interface', 'iocscan')"
)
parser.add_argument(
'-b',
'--batch-name',
type=str,
help='[iocscan] Specify batch name'
)
<|code_end|>
, predict the immediate next line with the help of imports:
import argparse, sys
from helpers import log
from helpers import init
from components.interface import web
from components.scanner import iocscan_queue
from components.scanner import hashscan_queue
and context (classes, functions, sometimes code) from other files:
# Path: helpers/log.py
# def init():
. Output only the next line. | log.init() |
Next line prediction: <|code_start|> if self.__bypass:
return evltResult.UNDEF
# private attribute for child class
rc = self.getRemoteCommand()
useWorkingDirectory = True
result = {}
ioc_list = self.filter_ioc_list(ioc_list)
if len(ioc_list)==0:
return result
file_name, file_content = self.file_from_ioc_list(ioc_list)
self.log('Loading file %s' % file_name, logging.DEBUG)
with open(value_file, 'w') as f:
f.write(file_content)
if self.__confidential:
raise NotImplementedError
# Local SQLITE3 instance
# TODO: do it
# sqlite3loc = os.path.join(LOCAL_ANALYSIS_DIR, 'sqlite3.exe')
# dbloc = os.path.join(self.dirname, self.__dbName)
# localcommand = 'type "%s" | "%s" "%s"' % (value_file, sqlite3loc, dbloc)
# res = os.popen(localcommand).read().replace('\r', '').replace('\n', '')
else:
rc.dropFile(value_file, file_name, False)
<|code_end|>
. Use current file imports:
(import logging
import os
import sys
import tarfile
import result as evltResult
from helpers.helpers import threadname)
and context including class names, function names, or small code snippets from other files:
# Path: helpers/helpers.py
# def hashPassword(password):
# def checksum(data):
# def verifyPassword(p):
# MIN = False
# MAJ = False
# NUM = False
# SPEC = False
# MIN = True
# MAJ = True
# NUM = True
# SPEC = True
. Output only the next line. | file_identifier = '%s.%s' % (threadname, ioc_list[0].document.lower()) |
Given the following code snippet before the placeholder: <|code_start|>
CERTitude is under licence GPL-2.0:
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
def init():
try:
chemin = path.dirname(path.abspath(__file__))
except:
chemin = "" # relatif
logging.basicConfig(filename=path.join(chemin, '..', LOG_DIRECTORY, 'certitude-core.log'), format=FORMAT_LOGS, filemode='a')
formatter = logging.Formatter(FORMAT_LOGS)
<|code_end|>
, predict the next line using imports from the current file:
import logging
from os import path
from config import DEBUG, CONSOLE_DEBUG_LEVEL, LOG_DIRECTORY, FORMAT_LOGS
and context including class names, function names, and sometimes code from other files:
# Path: config.py
# DEBUG = True
#
# CONSOLE_DEBUG_LEVEL = logging.DEBUG
#
# LOG_DIRECTORY = '_log'
#
# FORMAT_LOGS = '%(asctime)s %(name)-14s %(levelname)-8s %(message)s'
. Output only the next line. | if DEBUG: |
Predict the next line after this snippet: <|code_start|> logging.getLogger('').setLevel(logging.INFO)
# Database
loggingdb = logging.getLogger('sqlalchemy.engine')
loggingdb.setLevel(logging.WARNING)
handler_logdb = logging.FileHandler(path.join(chemin, '..', LOG_DIRECTORY, 'db.log'))
handler_logdb.setFormatter(formatter)
loggingdb.addHandler(handler_logdb)
# API Server
loggingserver = logging.getLogger('api')
handler_logapi = logging.FileHandler(path.join(chemin, '..', LOG_DIRECTORY, 'api.log'))
handler_logapi.setFormatter(formatter)
loggingserver.addHandler(handler_logapi)
# IOCScanners
loggingiocscan = logging.getLogger('iocscanner')
handler_logiocscan = logging.FileHandler(path.join(chemin, '..', LOG_DIRECTORY, 'iocscanners.log'))
handler_logiocscan.setFormatter(formatter)
loggingiocscan.addHandler(handler_logiocscan)
# Hashscanners
logginghashscan = logging.getLogger('hashscanner')
handler_loghashscan = logging.FileHandler(path.join(chemin, '..', LOG_DIRECTORY, 'hashscanners.log'))
handler_loghashscan.setFormatter(formatter)
logginghashscan.addHandler(handler_loghashscan)
# Console output
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
<|code_end|>
using the current file's imports:
import logging
from os import path
from config import DEBUG, CONSOLE_DEBUG_LEVEL, LOG_DIRECTORY, FORMAT_LOGS
and any relevant context from other files:
# Path: config.py
# DEBUG = True
#
# CONSOLE_DEBUG_LEVEL = logging.DEBUG
#
# LOG_DIRECTORY = '_log'
#
# FORMAT_LOGS = '%(asctime)s %(name)-14s %(levelname)-8s %(message)s'
. Output only the next line. | console.setLevel(CONSOLE_DEBUG_LEVEL) |
Using the snippet: <|code_start|>
Contact: cert@wavestone.com
Contributors: @iansus, @nervous, @fschwebel
CERTitude is under licence GPL-2.0:
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
def init():
try:
chemin = path.dirname(path.abspath(__file__))
except:
chemin = "" # relatif
<|code_end|>
, determine the next line of code. You have imports:
import logging
from os import path
from config import DEBUG, CONSOLE_DEBUG_LEVEL, LOG_DIRECTORY, FORMAT_LOGS
and context (class names, function names, or code) available:
# Path: config.py
# DEBUG = True
#
# CONSOLE_DEBUG_LEVEL = logging.DEBUG
#
# LOG_DIRECTORY = '_log'
#
# FORMAT_LOGS = '%(asctime)s %(name)-14s %(levelname)-8s %(message)s'
. Output only the next line. | logging.basicConfig(filename=path.join(chemin, '..', LOG_DIRECTORY, 'certitude-core.log'), format=FORMAT_LOGS, filemode='a') |
Given the code snippet: <|code_start|>
Contact: cert@wavestone.com
Contributors: @iansus, @nervous, @fschwebel
CERTitude is under licence GPL-2.0:
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
def init():
try:
chemin = path.dirname(path.abspath(__file__))
except:
chemin = "" # relatif
<|code_end|>
, generate the next line using the imports in this file:
import logging
from os import path
from config import DEBUG, CONSOLE_DEBUG_LEVEL, LOG_DIRECTORY, FORMAT_LOGS
and context (functions, classes, or occasionally code) from other files:
# Path: config.py
# DEBUG = True
#
# CONSOLE_DEBUG_LEVEL = logging.DEBUG
#
# LOG_DIRECTORY = '_log'
#
# FORMAT_LOGS = '%(asctime)s %(name)-14s %(levelname)-8s %(message)s'
. Output only the next line. | logging.basicConfig(filename=path.join(chemin, '..', LOG_DIRECTORY, 'certitude-core.log'), format=FORMAT_LOGS, filemode='a') |
Given snippet: <|code_start|>
try:
args = curdoc().session_context.request.arguments
batch_id = int(args.get('batchid')[0])
except:
print 'none specified - setting batch id to 1..'
batch_id = 1
def getInfosFromXML(content):
c = base64.b64decode(content)
r = {'guids': {}, 'totalguids': 0}
xml = ET.fromstring(c)
openiocparser.removeNS(xml)
for indic in xml.iter('IndicatorItem'):
guid = indic.attrib['id']
context = indic.findall('Context')[0]
search = context.attrib['search']
content = indic.findall('Content')[0]
value = content.text
r['guids'][guid] = {'search': search, 'value': value}
r['totalguids'] += 1
return r
def getDataframeFromBatchid(batchid):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure
and context:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
which might include code, classes, or functions. Output only the next line. | engine = create_engine(CERTITUDE_DATABASE, echo=False) |
Given the following code snippet before the placeholder: <|code_start|> panda_response = ['%d' % result.id, '%s' % task.ip, '%s' % result.smbreachable,
'%s' % task.iocscanned, '%s' % task.hashscanned, '%s' % task.commentaire,
'%s' % mname, float('%.2f' % mval)]
# Complete detection / IOC
for id in all_iocs:
panda_response.append(float('%.2f' % result_for_host[id]))
coldata.append(panda_response)
# df.loc[len(df)] = panda_response
return pd.DataFrame(coldata, columns = columns)
pandata = getDataframeFromBatchid(batch_id)
pandata.fillna('None', inplace=True) # just replace missing values with zero
source = ColumnDataSource(pandata)
SIZES = list(range(6, 22, 3))
columns = [
TableColumn(field="HostnameIP", title="Address", width=450),
TableColumn(field="Malware", title="Malware"),
TableColumn(field="Compromise", title="Compromise"),
TableColumn(field="Lookup:Success", title="Success"),
TableColumn(field="Lookup:IOCScanned", title="IOCScanned"),
TableColumn(field="Lookup:HashScanned", title="HashScanned"),
TableColumn(field="Lookup:Subnet", title="Subnet"),
TableColumn(field='HostId', title='Result',
formatter=HTMLTemplateFormatter(
<|code_end|>
, predict the next line using imports from the current file:
import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure
and context including class names, function names, and sometimes code from other files:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
. Output only the next line. | template='<a href="http://%s:%d/host-result/<%%= value %%>" target="_blank">#<%%= value %%></a>' % (LISTEN_ADDRESS, LISTEN_PORT))) |
Continue the code snippet: <|code_start|> panda_response = ['%d' % result.id, '%s' % task.ip, '%s' % result.smbreachable,
'%s' % task.iocscanned, '%s' % task.hashscanned, '%s' % task.commentaire,
'%s' % mname, float('%.2f' % mval)]
# Complete detection / IOC
for id in all_iocs:
panda_response.append(float('%.2f' % result_for_host[id]))
coldata.append(panda_response)
# df.loc[len(df)] = panda_response
return pd.DataFrame(coldata, columns = columns)
pandata = getDataframeFromBatchid(batch_id)
pandata.fillna('None', inplace=True) # just replace missing values with zero
source = ColumnDataSource(pandata)
SIZES = list(range(6, 22, 3))
columns = [
TableColumn(field="HostnameIP", title="Address", width=450),
TableColumn(field="Malware", title="Malware"),
TableColumn(field="Compromise", title="Compromise"),
TableColumn(field="Lookup:Success", title="Success"),
TableColumn(field="Lookup:IOCScanned", title="IOCScanned"),
TableColumn(field="Lookup:HashScanned", title="HashScanned"),
TableColumn(field="Lookup:Subnet", title="Subnet"),
TableColumn(field='HostId', title='Result',
formatter=HTMLTemplateFormatter(
<|code_end|>
. Use current file imports:
import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure
and context (classes, functions, or code) from other files:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
. Output only the next line. | template='<a href="http://%s:%d/host-result/<%%= value %%>" target="_blank">#<%%= value %%></a>' % (LISTEN_ADDRESS, LISTEN_PORT))) |
Next line prediction: <|code_start|> return r
def getDataframeFromBatchid(batchid):
engine = create_engine(CERTITUDE_DATABASE, echo=False)
dbsession = sessionmaker(bind=engine)()
columns = ['HostId','HostnameIP' , 'Lookup:Success' , 'Lookup:IOCScanned' , 'Lookup:HashScanned', 'Lookup:Subnet' , 'Malware', 'Compromise']
coldata = []
batch = dbsession.query(Batch).filter_by(id=batchid).first()
if batch is None:
raise Exception('No batch found')
# Get all IOCs
cp = dbsession.query(ConfigurationProfile).filter_by(id=batch.configuration_profile_id).first()
if cp.ioc_list == '':
ioc_list = []
else:
ioc_list = [int(e) for e in cp.ioc_list.split(',')]
iocs = dbsession.query(XMLIOC).filter(XMLIOC.id.in_(ioc_list)).all()
# Complete first line & assoc ioc.id => ioc
all_iocs = {}
for ioc in iocs:
all_iocs[ioc.id] = ioc
columns.append('%s' % ioc.name)
<|code_end|>
. Use current file imports:
(import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure)
and context including class names, function names, or small code snippets from other files:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
. Output only the next line. | all_tasks_results = dbsession.query(Task, Result).filter(Task.batch_id == batchid).join(Result, |
Continue the code snippet: <|code_start|> if batch is None:
raise Exception('No batch found')
# Get all IOCs
cp = dbsession.query(ConfigurationProfile).filter_by(id=batch.configuration_profile_id).first()
if cp.ioc_list == '':
ioc_list = []
else:
ioc_list = [int(e) for e in cp.ioc_list.split(',')]
iocs = dbsession.query(XMLIOC).filter(XMLIOC.id.in_(ioc_list)).all()
# Complete first line & assoc ioc.id => ioc
all_iocs = {}
for ioc in iocs:
all_iocs[ioc.id] = ioc
columns.append('%s' % ioc.name)
all_tasks_results = dbsession.query(Task, Result).filter(Task.batch_id == batchid).join(Result,
Task.id == Result.tache_id).all()
# Get total indicator items / IOC
total_by_ioc = {}
for ioc in iocs:
infos = getInfosFromXML(ioc.xml_content)
total_by_ioc[ioc.id] = infos['totalguids']
for task, result in all_tasks_results:
<|code_end|>
. Use current file imports:
import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure
and context (classes, functions, or code) from other files:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
. Output only the next line. | ioc_detections = dbsession.query(IOCDetection).filter_by(result_id=result.id).all() |
Predict the next line for this snippet: <|code_start|>
xml = ET.fromstring(c)
openiocparser.removeNS(xml)
for indic in xml.iter('IndicatorItem'):
guid = indic.attrib['id']
context = indic.findall('Context')[0]
search = context.attrib['search']
content = indic.findall('Content')[0]
value = content.text
r['guids'][guid] = {'search': search, 'value': value}
r['totalguids'] += 1
return r
def getDataframeFromBatchid(batchid):
engine = create_engine(CERTITUDE_DATABASE, echo=False)
dbsession = sessionmaker(bind=engine)()
columns = ['HostId','HostnameIP' , 'Lookup:Success' , 'Lookup:IOCScanned' , 'Lookup:HashScanned', 'Lookup:Subnet' , 'Malware', 'Compromise']
coldata = []
batch = dbsession.query(Batch).filter_by(id=batchid).first()
if batch is None:
raise Exception('No batch found')
# Get all IOCs
<|code_end|>
with the help of current file imports:
import pandas as pd
import io
import base64
import components.scanner.openioc.openiocparser as openiocparser
import xml.etree.ElementTree as ET
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from config import CERTITUDE_DATABASE, LISTEN_ADDRESS, LISTEN_PORT
from helpers.queue_models import Task
from helpers.results_models import Result, IOCDetection
from helpers.misc_models import ConfigurationProfile, XMLIOC, Batch
from bokeh.layouts import row, widgetbox, layout
from bokeh.models import Select, Slider, DataTable, TableColumn, ColumnDataSource
from bokeh.models import ColumnDataSource, BoxSelectTool, ResetTool, Div, HTMLTemplateFormatter
from bokeh.plotting import curdoc, figure
and context from other files:
# Path: config.py
# CERTITUDE_DATABASE = "sqlite:///data.db"
#
# LISTEN_ADDRESS = '127.0.0.1'
#
# LISTEN_PORT = 5000
#
# Path: helpers/results_models.py
# class Result(Base):
# __tablename__ = 'resultats'
#
# id = Column(Integer, primary_key=True)
# tache_id = Column(Integer, ForeignKey('queue.id'))
# finished = Column(DateTime, default=datetime.now)
#
# up = Column(Boolean)
# blocked = Column(Boolean)
# ip = Column(String)
# hostname = Column(String)
# smbreachable = Column(Boolean)
#
# class IOCDetection(Base):
# __tablename__ = 'iocdetections'
#
# id = Column(Integer, primary_key=True)
# result_id = Column(Integer, ForeignKey('resultats.id'))
# resultat = relationship(Result,
# backref='iocdetections',)
# xmlioc_id = Column(Integer)
# indicator_id = Column(String) # Length should be 62
# indicator_data = Column(String)
#
# Path: helpers/misc_models.py
# class ConfigurationProfile(Base):
# __tablename__ = 'configuration_profiles'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# host_confidential = Column(Boolean, default=False)
# ioc_list = Column(String)
# yara_list = Column(String)
#
# class XMLIOC(Base):
# __tablename__ = 'xmliocs'
#
# id = Column(Integer, primary_key=True)
# name = Column(String)
# date_added = Column(DateTime, default=datetime.now)
# xml_content = Column(String)
#
# class Batch(Base):
# __tablename__ = 'batches'
#
# id = Column(Integer, primary_key = True)
# name = Column(String)
# configuration_profile_id = Column(Integer)
# windows_credential_id = Column(Integer)
, which may contain function names, class names, or code. Output only the next line. | cp = dbsession.query(ConfigurationProfile).filter_by(id=batch.configuration_profile_id).first() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.