| Instruction (string, lengths 362–7.83k) | output_code (string, lengths 1–945) |
|---|---|
Based on the snippet: <|code_start|>
admin.autodiscover()
app_name = "test_accounts"
urlpatterns = [
path(
"register/",
include(
ModelInvitation(org_model=Account, namespace="invitations").urls,
namespace="account_invitations",
<|code_end|>
, predict the immediate next line with the help of imports:
from django.conf.urls import include
from django.urls import path
from django.contrib import admin
from organizations.backends.modeled import ModelInvitation
from test_accounts.models import Account
and context (classes, functions, sometimes code) from other files:
# Path: test_accounts/models.py
# class Account(OrganizationBase):
# monthly_subscription = models.IntegerField(default=1000)
. Output only the next line. | ), |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
@override_settings(USE_TZ=True)
class ActiveManagerTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def test_active(self):
self.assertEqual(3, Organization.objects.all().count())
<|code_end|>
, predict the immediate next line with the help of imports:
from functools import partial
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from organizations.models import Organization
from organizations.models import OrganizationInvitation
from organizations.models import OrganizationOwner
from organizations.models import OrganizationUser
from organizations.utils import create_organization
from test_abstract.models import CustomOrganization
from test_accounts.models import Account
from test_accounts.models import AccountInvitation
from test_custom.models import Team
from organizations.exceptions import OwnershipRequired
from organizations.exceptions import OrganizationMismatch
and context (classes, functions, sometimes code) from other files:
# Path: test_abstract/models.py
# class CustomOrganization(AbstractOrganization):
# street_address = models.CharField(max_length=100, default="")
# city = models.CharField(max_length=100, default="")
#
# Path: test_accounts/models.py
# class Account(OrganizationBase):
# monthly_subscription = models.IntegerField(default=1000)
#
# Path: test_accounts/models.py
# class AccountInvitation(OrganizationInvitationBase):
#
# def get_absolute_url(self):
# """Returns the invitation URL"""
# return reverse(
# "test_accounts:account_invitations:invitations_register",
# kwargs={"guid": str(self.guid)},
# )
. Output only the next line. | self.assertEqual(2, Organization.active.all().count()) |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
@override_settings(USE_TZ=True)
class ActiveManagerTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def test_active(self):
self.assertEqual(3, Organization.objects.all().count())
self.assertEqual(2, Organization.active.all().count())
def test_by_user(self):
user = User.objects.get(username="dave")
self.assertEqual(3, Organization.objects.get_for_user(user).count())
self.assertEqual(2, Organization.active.get_for_user(user).count())
@override_settings(USE_TZ=True)
<|code_end|>
with the help of current file imports:
from functools import partial
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from organizations.models import Organization
from organizations.models import OrganizationInvitation
from organizations.models import OrganizationOwner
from organizations.models import OrganizationUser
from organizations.utils import create_organization
from test_abstract.models import CustomOrganization
from test_accounts.models import Account
from test_accounts.models import AccountInvitation
from test_custom.models import Team
from organizations.exceptions import OwnershipRequired
from organizations.exceptions import OrganizationMismatch
and context from other files:
# Path: test_abstract/models.py
# class CustomOrganization(AbstractOrganization):
# street_address = models.CharField(max_length=100, default="")
# city = models.CharField(max_length=100, default="")
#
# Path: test_accounts/models.py
# class Account(OrganizationBase):
# monthly_subscription = models.IntegerField(default=1000)
#
# Path: test_accounts/models.py
# class AccountInvitation(OrganizationInvitationBase):
#
# def get_absolute_url(self):
# """Returns the invitation URL"""
# return reverse(
# "test_accounts:account_invitations:invitations_register",
# kwargs={"guid": str(self.guid)},
# )
, which may contain function names, class names, or code. Output only the next line. | class OrgModelTests(TestCase): |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
@override_settings(USE_TZ=True)
class ActiveManagerTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def test_active(self):
<|code_end|>
with the help of current file imports:
from functools import partial
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from organizations.models import Organization
from organizations.models import OrganizationInvitation
from organizations.models import OrganizationOwner
from organizations.models import OrganizationUser
from organizations.utils import create_organization
from test_abstract.models import CustomOrganization
from test_accounts.models import Account
from test_accounts.models import AccountInvitation
from test_custom.models import Team
from organizations.exceptions import OwnershipRequired
from organizations.exceptions import OrganizationMismatch
and context from other files:
# Path: test_abstract/models.py
# class CustomOrganization(AbstractOrganization):
# street_address = models.CharField(max_length=100, default="")
# city = models.CharField(max_length=100, default="")
#
# Path: test_accounts/models.py
# class Account(OrganizationBase):
# monthly_subscription = models.IntegerField(default=1000)
#
# Path: test_accounts/models.py
# class AccountInvitation(OrganizationInvitationBase):
#
# def get_absolute_url(self):
# """Returns the invitation URL"""
# return reverse(
# "test_accounts:account_invitations:invitations_register",
# kwargs={"guid": str(self.guid)},
# )
, which may contain function names, class names, or code. Output only the next line. | self.assertEqual(3, Organization.objects.all().count()) |
Predict the next line for this snippet: <|code_start|>
@fixture
def tmpfile(tmpdir):
tmpfile = tmpdir.join('tmpfile')
assert not tmpfile.exists()
yield tmpfile.strpath
def assert_locked(tmpfile):
with ShouldRaise(flock.Locked(tmpfile)):
with flock(tmpfile):
raise AssertionError('this should not work')
class DescribeFlock:
def it_allows_first_caller(self, tmpfile):
with flock(tmpfile):
print('oh, hi!')
def it_disallows_subsequent_callers(self, tmpfile):
with flock(tmpfile):
print('oh, hi!')
<|code_end|>
with the help of current file imports:
from pytest import fixture
from testfixtures import ShouldRaise
from pgctl.flock import flock
from os.path import exists
from pgctl.subprocess import Popen
import os.path
and context from other files:
# Path: pgctl/flock.py
# @contextmanager
# def flock(file_or_dir, **acquire_args):
# """A context for flock.acquire()."""
# fd = None
# while fd is None:
# fd = acquire(file_or_dir, **acquire_args)
# try:
# yield fd
# finally:
# os.close(fd)
, which may contain function names, class names, or code. Output only the next line. | assert_locked(tmpfile) |
Given the code snippet: <|code_start|>
pytestmark = pytest.mark.usefixtures('in_example_dir')
SLOW_STARTUP_TIME = 6
@pytest.fixture
def service_name():
yield 'poll-ready-quick-shutdown'
def it_stops_quickly():
"""Tests a regression in pgctl where services using pgctl-poll-ready fail to
stop because the background process started by pgctl-poll-ready isn't dying
quickly."""
check_call(('pgctl', 'start'))
prestop_time = time.time()
check_call(('pgctl', 'stop'))
poststop_time = time.time()
<|code_end|>
, generate the next line using the imports in this file:
import time
import pytest
from pgctl.subprocess import check_call
and context (functions, classes, or occasionally code) from other files:
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | assert poststop_time - prestop_time < 2 |
Given snippet: <|code_start|> if error.errno == 3: # no such process
pass
else:
raise
limit -= 1
class DirtyTest:
@pytest.fixture(autouse=True)
def cleanup(self, in_example_dir):
try:
yield in_example_dir
finally:
for service in in_example_dir.join('playground').listdir():
clean_service(str(service))
class DescribeOrphanSubprocess(DirtyTest):
@pytest.fixture(autouse=True)
def environment(self):
os.environ['PGCTL_TIMEOUT'] = '5'
yield
del os.environ['PGCTL_TIMEOUT']
@pytest.fixture
def service_name(self):
yield 'orphan-subprocess'
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import signal
import subprocess
import time
import pytest
from testing import norm
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from pgctl.daemontools import SvStat
from pgctl.daemontools import svstat
from pgctl.errors import LockHeld
from pgctl.functions import show_runaway_processes
from pgctl.fuser import fuser
from pgctl.subprocess import check_call
and context:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/daemontools.py
# def svstat(path):
# return svstat_parse(svstat_string(path))
#
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/fuser.py
# def fuser(path, allow_deleted=False):
# """Return the list of pids that have 'path' open, for the current user"""
# search = stat(path)
# if search is None and not allow_deleted:
# return
#
# from glob import glob
# for fddir in glob('/proc/*/fd/'):
# try:
# pid = int(fddir.split('/', 3)[2])
# except ValueError:
# continue
#
# fds = listdir(fddir)
# for fd in fds:
# from os.path import join
# fd = join(fddir, fd)
# found = stat(fd)
# if found is None:
# # fd disappeared since we listed
# continue
#
# if found == search:
# yield pid
# break
#
# if allow_deleted and found.st_nlink == 0:
# from os import readlink
# if readlink(fd) == path + ' (deleted)':
# yield pid
# break
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
which might include code, classes, or functions. Output only the next line. | def it_starts_up_fine(self): |
Given snippet: <|code_start|>
def clean_service(service_path):
# we use SIGTERM; SIGKILL is cheating.
limit = 100
while limit > 0: # pragma: no branch: we don't expect to ever hit the limit
assert os.path.isdir(service_path), service_path
try:
show_runaway_processes(service_path)
print('lock released -- done.')
break
except LockHeld:
print('lock held -- killing!')
for pid in fuser(service_path):
try:
os.system('ps -fj %i' % pid)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import signal
import subprocess
import time
import pytest
from testing import norm
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from pgctl.daemontools import SvStat
from pgctl.daemontools import svstat
from pgctl.errors import LockHeld
from pgctl.functions import show_runaway_processes
from pgctl.fuser import fuser
from pgctl.subprocess import check_call
and context:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/daemontools.py
# def svstat(path):
# return svstat_parse(svstat_string(path))
#
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/fuser.py
# def fuser(path, allow_deleted=False):
# """Return the list of pids that have 'path' open, for the current user"""
# search = stat(path)
# if search is None and not allow_deleted:
# return
#
# from glob import glob
# for fddir in glob('/proc/*/fd/'):
# try:
# pid = int(fddir.split('/', 3)[2])
# except ValueError:
# continue
#
# fds = listdir(fddir)
# for fd in fds:
# from os.path import join
# fd = join(fddir, fd)
# found = stat(fd)
# if found is None:
# # fd disappeared since we listed
# continue
#
# if found == search:
# yield pid
# break
#
# if allow_deleted and found.st_nlink == 0:
# from os import readlink
# if readlink(fd) == path + ' (deleted)':
# yield pid
# break
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
which might include code, classes, or functions. Output only the next line. | os.kill(pid, signal.SIGTERM) |
Given snippet: <|code_start|>
def clean_service(service_path):
# we use SIGTERM; SIGKILL is cheating.
limit = 100
while limit > 0: # pragma: no branch: we don't expect to ever hit the limit
assert os.path.isdir(service_path), service_path
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import signal
import subprocess
import time
import pytest
from testing import norm
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from pgctl.daemontools import SvStat
from pgctl.daemontools import svstat
from pgctl.errors import LockHeld
from pgctl.functions import show_runaway_processes
from pgctl.fuser import fuser
from pgctl.subprocess import check_call
and context:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/daemontools.py
# def svstat(path):
# return svstat_parse(svstat_string(path))
#
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/fuser.py
# def fuser(path, allow_deleted=False):
# """Return the list of pids that have 'path' open, for the current user"""
# search = stat(path)
# if search is None and not allow_deleted:
# return
#
# from glob import glob
# for fddir in glob('/proc/*/fd/'):
# try:
# pid = int(fddir.split('/', 3)[2])
# except ValueError:
# continue
#
# fds = listdir(fddir)
# for fd in fds:
# from os.path import join
# fd = join(fddir, fd)
# found = stat(fd)
# if found is None:
# # fd disappeared since we listed
# continue
#
# if found == search:
# yield pid
# break
#
# if allow_deleted and found.st_nlink == 0:
# from os import readlink
# if readlink(fd) == path + ' (deleted)':
# yield pid
# break
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
which might include code, classes, or functions. Output only the next line. | try: |
Using the snippet: <|code_start|> if error.errno == 3: # no such process
pass
else:
raise
limit -= 1
class DirtyTest:
@pytest.fixture(autouse=True)
def cleanup(self, in_example_dir):
try:
yield in_example_dir
finally:
for service in in_example_dir.join('playground').listdir():
clean_service(str(service))
class DescribeOrphanSubprocess(DirtyTest):
@pytest.fixture(autouse=True)
def environment(self):
os.environ['PGCTL_TIMEOUT'] = '5'
yield
del os.environ['PGCTL_TIMEOUT']
@pytest.fixture
def service_name(self):
yield 'orphan-subprocess'
<|code_end|>
, determine the next line of code. You have imports:
import os
import signal
import subprocess
import time
import pytest
from testing import norm
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from pgctl.daemontools import SvStat
from pgctl.daemontools import svstat
from pgctl.errors import LockHeld
from pgctl.functions import show_runaway_processes
from pgctl.fuser import fuser
from pgctl.subprocess import check_call
and context (class names, function names, or code) available:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/daemontools.py
# def svstat(path):
# return svstat_parse(svstat_string(path))
#
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/fuser.py
# def fuser(path, allow_deleted=False):
# """Return the list of pids that have 'path' open, for the current user"""
# search = stat(path)
# if search is None and not allow_deleted:
# return
#
# from glob import glob
# for fddir in glob('/proc/*/fd/'):
# try:
# pid = int(fddir.split('/', 3)[2])
# except ValueError:
# continue
#
# fds = listdir(fddir)
# for fd in fds:
# from os.path import join
# fd = join(fddir, fd)
# found = stat(fd)
# if found is None:
# # fd disappeared since we listed
# continue
#
# if found == search:
# yield pid
# break
#
# if allow_deleted and found.st_nlink == 0:
# from os import readlink
# if readlink(fd) == path + ' (deleted)':
# yield pid
# break
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | def it_starts_up_fine(self): |
Given the following code snippet before the placeholder: <|code_start|>
def clean_service(service_path):
# we use SIGTERM; SIGKILL is cheating.
limit = 100
while limit > 0: # pragma: no branch: we don't expect to ever hit the limit
assert os.path.isdir(service_path), service_path
try:
show_runaway_processes(service_path)
print('lock released -- done.')
break
except LockHeld:
print('lock held -- killing!')
for pid in fuser(service_path):
try:
os.system('ps -fj %i' % pid)
os.kill(pid, signal.SIGTERM)
except OSError as error: # race condition -- process stopped between list and kill :pragma: no-cover
if error.errno == 3: # no such process
pass
else:
raise
limit -= 1
class DirtyTest:
@pytest.fixture(autouse=True)
def cleanup(self, in_example_dir):
<|code_end|>
, predict the next line using imports from the current file:
import os
import signal
import subprocess
import time
import pytest
from testing import norm
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from pgctl.daemontools import SvStat
from pgctl.daemontools import svstat
from pgctl.errors import LockHeld
from pgctl.functions import show_runaway_processes
from pgctl.fuser import fuser
from pgctl.subprocess import check_call
and context including class names, function names, and sometimes code from other files:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/daemontools.py
# def svstat(path):
# return svstat_parse(svstat_string(path))
#
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/fuser.py
# def fuser(path, allow_deleted=False):
# """Return the list of pids that have 'path' open, for the current user"""
# search = stat(path)
# if search is None and not allow_deleted:
# return
#
# from glob import glob
# for fddir in glob('/proc/*/fd/'):
# try:
# pid = int(fddir.split('/', 3)[2])
# except ValueError:
# continue
#
# fds = listdir(fddir)
# for fd in fds:
# from os.path import join
# fd = join(fddir, fd)
# found = stat(fd)
# if found is None:
# # fd disappeared since we listed
# continue
#
# if found == search:
# yield pid
# break
#
# if allow_deleted and found.st_nlink == 0:
# from os import readlink
# if readlink(fd) == path + ' (deleted)':
# yield pid
# break
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | try: |
Predict the next line after this snippet: <|code_start|>
class DescribeRetry:
def it_can_succeed(self):
assert wait_for(lambda: True) is True
def it_can_fail(self):
<|code_end|>
using the current file's imports:
from testfixtures import ShouldRaise
from .assertions import wait_for
and any relevant context from other files:
# Path: tests/testing/assertions.py
# def wait_for(assertion, sleep=.05, limit=10.0):
# """Some flakey assertions need to be retried."""
# # TODO(Yelp/pgctl#28): take this out once we can 'check'
# import time
# start = time.time()
# while True:
# try:
# truth = assertion()
# assert truth is None or truth
# return truth
# except AssertionError:
# if time.time() - start > limit:
# raise
# else:
# time.sleep(sleep)
. Output only the next line. | with ShouldRaise(AssertionError('assert (False is None or False)')): |
Predict the next line after this snippet: <|code_start|>
@pytest.fixture(autouse=True)
def sleep_short_background_long_foreground():
with set_slow_shutdown_sleeptime(0.75, 2.25):
yield
@pytest.mark.parametrize('service_name', ['slow-shutdown'])
<|code_end|>
using the current file's imports:
from time import sleep
from testing import norm
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from testing.subprocess import show_both
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
import pytest
and any relevant context from other files:
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | @pytest.mark.usefixtures('in_example_dir') |
Given the code snippet: <|code_start|>
@pytest.fixture(autouse=True)
def sleep_short_background_long_foreground():
with set_slow_shutdown_sleeptime(0.75, 2.25):
yield
@pytest.mark.parametrize('service_name', ['slow-shutdown'])
@pytest.mark.usefixtures('in_example_dir')
def it_is_disallowed():
<|code_end|>
, generate the next line using the imports in this file:
from time import sleep
from testing import norm
from testing.service_context import set_slow_shutdown_sleeptime
from testing.subprocess import assert_command
from testing.subprocess import show_both
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
import pytest
and context (functions, classes, or occasionally code) from other files:
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | assert_command( |
Given the following code snippet before the placeholder: <|code_start|>
class ANY_INTEGER:
def __eq__(self, other):
return isinstance(other, int)
class DescribePgctlLog:
@pytest.fixture
def service_name(self):
yield 'output'
def it_is_empty_before_anything_starts(self, in_example_dir):
assert_command(
<|code_end|>
, predict the next line using imports from the current file:
import json
import os
import subprocess
import pytest
import fcntl
import time
from unittest import mock
from testing import norm
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from testing.subprocess import run
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from testfixtures import StringComparison as S
from testing import copy_example
and context including class names, function names, and sometimes code from other files:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | ('pgctl', 'log'), |
Here is a snippet: <|code_start|>
class ANY_INTEGER:
def __eq__(self, other):
return isinstance(other, int)
class DescribePgctlLog:
@pytest.fixture
def service_name(self):
yield 'output'
def it_is_empty_before_anything_starts(self, in_example_dir):
<|code_end|>
. Write the next line using the current file imports:
import json
import os
import subprocess
import pytest
import fcntl
import time
from unittest import mock
from testing import norm
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from testing.subprocess import run
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from testfixtures import StringComparison as S
from testing import copy_example
and context from other files:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
, which may include functions, classes, or code. Output only the next line. | assert_command( |
Given snippet: <|code_start|>
class ANY_INTEGER:
def __eq__(self, other):
return isinstance(other, int)
class DescribePgctlLog:
@pytest.fixture
def service_name(self):
yield 'output'
def it_is_empty_before_anything_starts(self, in_example_dir):
assert_command(
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
import os
import subprocess
import pytest
import fcntl
import time
from unittest import mock
from testing import norm
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from testing.subprocess import run
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from testfixtures import StringComparison as S
from testing import copy_example
and context:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
which might include code, classes, or functions. Output only the next line. | ('pgctl', 'log'), |
Continue the code snippet: <|code_start|>
@contextmanager
def setup(tmpdir):
etc = tmpdir.ensure_dir('etc')
home = tmpdir.ensure_dir('home')
app = tmpdir.ensure_dir('app')
a = app.ensure_dir('a')
b = a.ensure_dir('b')
c = b.ensure_dir('c')
etc.join('my.conf').write('[my]\netc = etc')
home.join('.my.json').write('{"home":"home"}')
app.join('my.ini').write('''\
[my]
<|code_end|>
. Use current file imports:
import os
import pytest
from argparse import Namespace
from contextlib import contextmanager
from unittest import mock
from testing.norm import norm_trailing_whitespace_json
from testing.subprocess import assert_command
from pgctl.config import Config
from sys import executable
and context (classes, functions, or code) from other files:
# Path: pgctl/config.py
# class Config:
#
# def __init__(self, projectname, defaults=None):
# self.projectname = projectname
# self.defaults = defaults
#
# def from_file(self, filename):
# # TODO P3: refactor this spaghetti
# # TODO(ckuehl|2019-08-08): why do we support .ini files??
# if filename.endswith(('.conf', '.ini')):
# parser = configparser.ConfigParser()
# parser.read(filename)
# result = dict(parser.items(self.projectname))
# for key, value in result.items():
# if key.endswith('_list'):
# value = result.pop(key).split()
# key = key.rsplit('_list', 1)[0]
# result[key] = value
# return result
# elif filename.endswith(('.yaml', '.yml')):
# return yaml.load(
# open(filename),
# Loader=getattr(yaml, 'CSafeLoader', yaml.SafeLoader),
# )
# elif filename.endswith('.json'):
# return json.load(open(filename))
# else:
# raise UnrecognizedConfig('Unknown config type: %s' % filename)
#
# def from_glob(self, pattern):
# from pgctl.configsearch import glob
# results = []
# for fname in glob(pattern):
# try:
# config = self.from_file(fname)
# except UnrecognizedConfig:
# continue
# else:
# results.append(config)
#
# if len(results) == 1:
# return results[0]
# elif len(results) > 1:
# raise AmbiguousConfig('multiple configurations found at %s' % pattern)
#
# def from_path_prefix(self, pattern_prefix):
# pattern = ''.join((pattern_prefix, self.projectname, '.*'))
# return self.from_glob(pattern)
#
# def from_system(self):
# if environ.get('PGCTL_NO_GLOBAL_CONFIG') == 'true':
# return {}
# etc = join(environ.get('PREFIX', '/'), 'etc', '')
# return self.from_path_prefix(etc)
#
# def from_homedir(self):
# if environ.get('PGCTL_NO_GLOBAL_CONFIG') == 'true':
# return {}
# home = environ.get('HOME', '$HOME')
# return self.from_path_prefix(home + '/.')
#
# def from_environ(self, env=None):
# if env is None:
# env = environ
#
# var_prefix = self.projectname.upper() + '_'
# config = {}
# for varname, value in env.items():
# if varname.startswith(var_prefix):
# varname = varname.replace(var_prefix, '', 1).lower()
# if varname.endswith('_list'):
# varname = varname.rsplit('_list', 1)[0]
# value = value.split()
# config[varname] = value
# return config
#
# def from_app(self, path='.'):
# pattern = self.projectname + '.*'
# return merge(
# self.from_glob(join(parentdir, pattern))
# for parentdir in reversed(tuple(search_parent_directories(path)))
# )
#
# def from_cli(self, args):
# configs = []
# if args.config is not None:
# configs.append(self.from_file(args.config))
# configs.append(vars(args))
# return merge(configs)
#
# def combined(self, defaults=(), args=Dummy()):
# return merge((
# defaults,
# self.from_system(),
# self.from_homedir(),
# self.from_app(),
# self.from_environ(),
# self.from_cli(args),
# ))
. Output only the next line. | app = app |
Next line prediction: <|code_start|> assert read_line(read) == 'What is your name?\n'
proc.stdin.write(b'Buck\n')
proc.stdin.flush()
assert read_line(read) == 'Hello, Buck.\n'
finally:
ctrl_c(proc)
proc.wait()
@greeter_service
def it_works_with_nothing_running():
assert_svstat('playground/greeter', state=SvStat.UNSUPERVISED)
assert_works_interactively()
@greeter_service
def it_fails_with_multiple_services():
assert_command(
('pgctl', 'debug', 'abc', 'def'),
'',
'[pgctl] ERROR: Must debug exactly one service, not: abc, def\n',
1,
)
@greeter_service
def it_first_stops_the_background_service_if_running():
check_call(('pgctl', 'start', 'greeter'))
assert_svstat('playground/greeter', state='up')
<|code_end|>
. Use current file imports:
(import os
import pytest
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from os import read)
and context including class names, function names, or small code snippets from other files:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | assert_works_interactively() |
Predict the next line for this snippet: <|code_start|>
pytestmark = pytest.mark.usefixtures('in_example_dir')
greeter_service = pytest.mark.parametrize('service_name', ['greeter'])
unreliable_service = pytest.mark.parametrize('service_name', ['unreliable'])
def read_line(fd):
# read one-byte-at-a-time to avoid deadlocking by reading too much
line = ''
byte = None
while byte not in ('\n', ''):
<|code_end|>
with the help of current file imports:
import os
import pytest
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from os import read
and context from other files:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
, which may contain function names, class names, or code. Output only the next line. | byte = read(fd, 1).decode('utf-8') |
Using the snippet: <|code_start|>
pytestmark = pytest.mark.usefixtures('in_example_dir')
greeter_service = pytest.mark.parametrize('service_name', ['greeter'])
unreliable_service = pytest.mark.parametrize('service_name', ['unreliable'])
def read_line(fd):
# read one-byte-at-a-time to avoid deadlocking by reading too much
line = ''
byte = None
while byte not in ('\n', ''):
byte = read(fd, 1).decode('utf-8')
line += byte
return line
@greeter_service
def assert_works_interactively():
<|code_end|>
, determine the next line of code. You have imports:
import os
import pytest
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from os import read
and context (class names, function names, or code) available:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | read, write = os.openpty() |
Given snippet: <|code_start|>
pytestmark = pytest.mark.usefixtures('in_example_dir')
greeter_service = pytest.mark.parametrize('service_name', ['greeter'])
unreliable_service = pytest.mark.parametrize('service_name', ['unreliable'])
def read_line(fd):
# read one-byte-at-a-time to avoid deadlocking by reading too much
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import pytest
from testing import pty
from testing.assertions import assert_svstat
from testing.assertions import wait_for
from testing.subprocess import assert_command
from testing.subprocess import ctrl_c
from pgctl.daemontools import SvStat
from pgctl.subprocess import check_call
from pgctl.subprocess import PIPE
from pgctl.subprocess import Popen
from os import read
and context:
# Path: pgctl/daemontools.py
# class SvStat(
# namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
# ):
# __slots__ = ()
# UNSUPERVISED = 'could not get status, supervisor is down'
# INVALID = 'no such service'
#
# def __repr__(self):
# format = '{0.state}'
# if self.pid is not None:
# format += ' (pid {0.pid})'
# if self.exitcode is not None:
# format += ' (exitcode {0.exitcode})'
# if self.seconds is not None:
# format += ' {0.seconds} seconds'
# if self.process is not None:
# format += ', {0.process}'
#
# return format.format(self)
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
which might include code, classes, or functions. Output only the next line. | line = '' |
Given the code snippet: <|code_start|>
class DescribeUnique:
def it_does_not_have_duplicates(self):
data = ['b', 'b', 'b']
assert list(unique(data)) == ['b']
def it_removes_duplicates_with_first_one_wins_mentality(self):
data = ['a', 'b', 'c', 'b', 'd', 'a']
assert list(unique(data)) == ['a', 'b', 'c', 'd']
class DescribeJSONEncoder:
def it_encodes_frozendict(self):
test_dict = frozendict({
'pgdir': 'playground',
'services': ('default',),
<|code_end|>
, generate the next line using the imports in this file:
import os
import pytest
from unittest import mock
from frozendict import frozendict
from testfixtures import ShouldRaise
from testing.assertions import wait_for
from testing.norm import norm_trailing_whitespace_json
from pgctl.errors import LockHeld
from pgctl.functions import bestrelpath
from pgctl.functions import JSONEncoder
from pgctl.functions import logger_preexec
from pgctl.functions import show_runaway_processes
from pgctl.functions import supervisor_preexec
from pgctl.functions import terminate_processes
from pgctl.functions import unique
from pgctl.subprocess import Popen
and context (functions, classes, or occasionally code) from other files:
# Path: pgctl/errors.py
# class LockHeld(PgctlUserMessage):
# """The pgctl supervision lock is held. This generally indicates subprocesses escaping supervision."""
#
# Path: pgctl/functions.py
# def bestrelpath(path, relto=None):
# """Return a relative path only if it's under $PWD (or `relto`)"""
# if relto is None:
# from os import getcwd
# relto = getcwd()
# from os.path import relpath
# relpath = relpath(path, relto)
# if relpath.startswith('.'):
# return path
# else:
# return relpath
#
# Path: pgctl/functions.py
# class JSONEncoder(json.JSONEncoder):
# """knows that frozendict is like dict"""
#
# def default(self, o):
# if isinstance(o, frozendict):
# return dict(o)
# else:
# # Let the base class default method raise the TypeError
# return json.JSONEncoder.default(self, o)
#
# Path: pgctl/functions.py
# def logger_preexec(log_path):
# """Pre exec func. for starting the logger process for a service.
#
# Before execing the logger service (s6-log), connect stdin to the logging
# FIFO so that it reads log lines from the service, and connect stdout/stderr
# to the void since we ignore the logger's console output.
# (The logger writes actual log output to files in $SERVICE_DIR/logs.)
#
# :param log_path: path to the logging FIFO
# """
# # Even though this is technically RDONLY, we open
# # it as RDWR to avoid blocking
# #
# # http://bugs.python.org/issue10635
# log_fifo_reader = os.open(log_path, os.O_RDWR)
# devnull = os.open(os.devnull, os.O_WRONLY)
#
# os.dup2(log_fifo_reader, StreamFileDescriptor.STDIN)
# os.dup2(devnull, StreamFileDescriptor.STDOUT)
# os.dup2(devnull, StreamFileDescriptor.STDERR)
#
# os.close(log_fifo_reader)
# os.close(devnull)
#
# Path: pgctl/functions.py
# def show_runaway_processes(path):
# from .fuser import fuser
# processes = ps(fuser(path))
# if processes:
# raise LockHeld(
# '''\
# these runaway processes did not stop:
# {}
# This usually means these processes are buggy.
# Normally pgctl would kill these automatically for you, but you specified the --no-force option.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# )
#
# Path: pgctl/functions.py
# def supervisor_preexec(log_path):
# """Pre exec func. for starting a service.
#
# Before execing the service, attach the output streams of the supervised
# process to the logging FIFO so that they will be logged to a file by the
# service's logger (s6-log). Also, attach the service's stdin to the void
# since it's running in a supervised context (and shouldn't have any data
# going to stdin).
#
# :param log_path: path to the logging pipe
# """
# # Should be WRONLY, but we can't block (see logger_preexec)
# log_fifo_writer = os.open(log_path, os.O_RDWR)
#
# devnull = os.open(os.devnull, os.O_RDONLY)
# os.dup2(devnull, StreamFileDescriptor.STDIN)
# os.dup2(log_fifo_writer, StreamFileDescriptor.STDOUT)
# os.dup2(log_fifo_writer, StreamFileDescriptor.STDERR)
#
# os.close(log_fifo_writer)
# os.close(devnull)
#
# Path: pgctl/functions.py
# def terminate_processes(pids: typing.Iterable[int], is_stop: bool = True) -> typing.Optional[str]:
# """forcefully kill processes"""
# processes = ps(pids)
# if processes:
# for pid in pids:
# try:
# os.kill(pid, signal.SIGKILL)
# except OSError: # pragma: no cover
# # race condition: processes stopped slightly after timeout, before we kill it
# pass
#
# if is_stop:
# return '''WARNING: Killing these runaway processes which did not stop:
# {}
# This usually means these processes are buggy.
# Learn more: https://pgctl.readthedocs.org/en/latest/user/quickstart.html#writing-playground-services
# '''.format(processes)
# else:
# return '''WARNING: Killing these processes which were still running but escaped supervision:
# {}
# This usually means that s6-supervise was not stopped cleanly (e.g. manually killed).
# Learn more: https://pgctl.readthedocs.io/en/latest/user/usage.html#stop
# '''.format(processes)
#
# Path: pgctl/functions.py
# def unique(iterable):
# """remove duplicates while preserving ordering -- first one wins"""
# return tuple(_unique(iterable))
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
. Output only the next line. | 'aliases': frozendict({ |
Predict the next line for this snippet: <|code_start|>
@pytest.fixture(autouse=True)
def in_tmpdir(tmpdir):
with tmpdir.as_cwd():
yield
class DescribeFloatFile:
def it_loads_files(self):
filename = 'notification-fd'
with open(filename, 'w') as f:
f.write('5')
result = poll_ready.floatfile(filename)
assert isinstance(result, float)
<|code_end|>
with the help of current file imports:
import os
import pytest
from unittest import mock
from pgctl import poll_ready
and context from other files:
# Path: pgctl/poll_ready.py
# def floatfile(filename):
# def getval(filename, envname, default):
# def check_ready():
# def wait_for_down_signal(down_fifo, seconds):
# def pgctl_poll_ready(down_fifo, notification_fd, timeout, poll_ready, poll_down, check_ready=check_ready):
# def main():
, which may contain function names, class names, or code. Output only the next line. | assert result == 5.0 |
Predict the next line for this snippet: <|code_start|>def svstat_parse(svstat_string):
r'''
>>> svstat_parse('up (pid 3714560) 13 seconds, normally down, ready 7 seconds\n')
ready (pid 3714560) 7 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
ready (pid 1202562) 10 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds\n')
up (pid 1202562) 100 seconds
>>> svstat_parse('down 4334 seconds, normally up, want up')
down 4334 seconds, starting
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
down (exitcode 0) 0 seconds, starting
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('up (pid 1202) 1 seconds, want down\n')
up (pid 1202) 1 seconds, stopping
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('s6-svstat: fatal: unable to read status for wat: No such file or directory')
could not get status, supervisor is down
>>> svstat_parse("s6-svstat: fatal: unable to read status for sweet: Broken pipe\n")
<|code_end|>
with the help of current file imports:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context from other files:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
, which may contain function names, class names, or code. Output only the next line. | could not get status, supervisor is down |
Using the snippet: <|code_start|> ready (pid 3714560) 7 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
ready (pid 1202562) 10 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds\n')
up (pid 1202562) 100 seconds
>>> svstat_parse('down 4334 seconds, normally up, want up')
down 4334 seconds, starting
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
down (exitcode 0) 0 seconds, starting
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('up (pid 1202) 1 seconds, want down\n')
up (pid 1202) 1 seconds, stopping
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('s6-svstat: fatal: unable to read status for wat: No such file or directory')
could not get status, supervisor is down
>>> svstat_parse("s6-svstat: fatal: unable to read status for sweet: Broken pipe\n")
could not get status, supervisor is down
>>> svstat_parse('unable to chdir: file does not exist')
<|code_end|>
, determine the next line of code. You have imports:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context (class names, function names, or code) available:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
. Output only the next line. | no such service |
Using the snippet: <|code_start|>def svstat_parse(svstat_string):
r'''
>>> svstat_parse('up (pid 3714560) 13 seconds, normally down, ready 7 seconds\n')
ready (pid 3714560) 7 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
ready (pid 1202562) 10 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds\n')
up (pid 1202562) 100 seconds
>>> svstat_parse('down 4334 seconds, normally up, want up')
down 4334 seconds, starting
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
down (exitcode 0) 0 seconds, starting
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('up (pid 1202) 1 seconds, want down\n')
up (pid 1202) 1 seconds, stopping
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('s6-svstat: fatal: unable to read status for wat: No such file or directory')
could not get status, supervisor is down
>>> svstat_parse("s6-svstat: fatal: unable to read status for sweet: Broken pipe\n")
<|code_end|>
, determine the next line of code. You have imports:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context (class names, function names, or code) available:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
. Output only the next line. | could not get status, supervisor is down |
Continue the code snippet: <|code_start|> up (pid 1202562) 100 seconds
>>> svstat_parse('down 4334 seconds, normally up, want up')
down 4334 seconds, starting
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
down (exitcode 0) 0 seconds, starting
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('up (pid 1202) 1 seconds, want down\n')
up (pid 1202) 1 seconds, stopping
>>> svstat_parse('down 0 seconds, normally up')
down 0 seconds
>>> svstat_parse('s6-svstat: fatal: unable to read status for wat: No such file or directory')
could not get status, supervisor is down
>>> svstat_parse("s6-svstat: fatal: unable to read status for sweet: Broken pipe\n")
could not get status, supervisor is down
>>> svstat_parse('unable to chdir: file does not exist')
no such service
>>> svstat_parse('totally unpredictable error message')
totally unpredictable error message
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want wat, ready 0 seconds')
<|code_end|>
. Use current file imports:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context (classes, functions, or code) from other files:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
. Output only the next line. | Traceback (most recent call last): |
Given the following code snippet before the placeholder: <|code_start|> string = string[len(start):]
try:
result, string = string.split(divider, 1)
except ValueError:
# if there's no separator found and we found the `start` token, the whole input is the result
result, string = string, ''
else:
result = None
if result is not None:
result = type(result)
return result, string
def svstat_parse(svstat_string):
r'''
>>> svstat_parse('up (pid 3714560) 13 seconds, normally down, ready 7 seconds\n')
ready (pid 3714560) 7 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
ready (pid 1202562) 10 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds\n')
up (pid 1202562) 100 seconds
>>> svstat_parse('down 4334 seconds, normally up, want up')
down 4334 seconds, starting
>>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
down (exitcode 0) 0 seconds, starting
<|code_end|>
, predict the next line using imports from the current file:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context including class names, function names, and sometimes code from other files:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
. Output only the next line. | >>> svstat_parse('down 0 seconds, normally up') |
Based on the snippet: <|code_start|> status = status.decode('UTF-8')
#status is listed per line for each argument
return status
def parse(string, start, divider, type=str):
"""general purpose tokenizer, used below"""
if string.startswith(start):
string = string[len(start):]
try:
result, string = string.split(divider, 1)
except ValueError:
# if there's no separator found and we found the `start` token, the whole input is the result
result, string = string, ''
else:
result = None
if result is not None:
result = type(result)
return result, string
def svstat_parse(svstat_string):
r'''
>>> svstat_parse('up (pid 3714560) 13 seconds, normally down, ready 7 seconds\n')
ready (pid 3714560) 7 seconds
>>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
ready (pid 1202562) 10 seconds
<|code_end|>
, predict the immediate next line with the help of imports:
from collections import namedtuple
from .debug import trace
from .errors import Unsupervised
from .subprocess import CalledProcessError
from .subprocess import PIPE
from .subprocess import Popen
from .subprocess import STDOUT
import sys
and context (classes, functions, sometimes code) from other files:
# Path: pgctl/debug.py
# def trace(msg, *args):
# debug(msg, *args, level=3)
#
# Path: pgctl/errors.py
# class Unsupervised(Exception):
# """The pgctl supervision process has gone missing."""
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# PIPE = subprocess.PIPE
#
# Path: pgctl/subprocess.py
# def set_defaults(func, **defaults):
# def wrapped(cmd, **kwargs):
# PIPE = subprocess.PIPE
# STDOUT = subprocess.STDOUT
#
# Path: pgctl/subprocess.py
# STDOUT = subprocess.STDOUT
. Output only the next line. | >>> svstat_parse('up (pid 1202562) 100 seconds\n') |
Next line prediction: <|code_start|>
def test_tailer(tmp_path):
file_a = (tmp_path / 'a').open('a+')
file_b = (tmp_path / 'b').open('a+')
# At the start there should be no lines.
tailer = Tailer((file_a.name, file_b.name))
assert tailer.new_lines_available() is False
assert tailer.get_logs() == []
# It should pick up changes to a single file.
file_a.write('A\n')
file_a.flush()
wait_for(lambda: tailer.new_lines_available() is True)
assert tailer.get_logs() == [TailEvent(file_a.name, [b'A'])]
assert tailer.new_lines_available() is False
assert tailer.get_logs() == []
# It should pick up changes to multiple files.
file_a.write('A\nA\n')
file_a.flush()
file_b.write('B\n')
<|code_end|>
. Use current file imports:
(import os
import shutil
import pytest
from unittest import mock
from testing.assertions import wait_for
from pgctl.log_viewer import LogViewer
from pgctl.log_viewer import Tailer
from pgctl.log_viewer import TailEvent)
and context including class names, function names, or small code snippets from other files:
# Path: pgctl/log_viewer.py
# class LogViewer:
#
# def __init__(self, height: int, name_to_path: typing.Dict[str, str]):
# self._tailer = Tailer(name_to_path.values())
# self._prev_width = None
# self._visible_lines = []
# self._name_to_path = name_to_path
# self._path_to_name = {path: name for name, path in name_to_path.items()}
# self.height = height
#
# def move_cursor_to_top(self) -> str:
# if self._prev_width is not None:
# return f'\x1b[{self.height + 1}F'
# else:
# return ''
#
# def _terminal_width(self) -> int:
# columns = shutil.get_terminal_size((80, 20)).columns
# if columns <= 5:
# # This happens a lot with pty spawning (usually with 0x0 size).
# # Just default to something reasonable.
# return 80
# else:
# return columns
#
# def redraw_needed(self) -> bool:
# return self._tailer.new_lines_available() or self._prev_width != self._terminal_width()
#
# def clear_below(self) -> str:
# return '\x1b[0J'
#
# def draw_logs(self, title: str) -> str:
# width = self._terminal_width()
#
# log_events = self._tailer.get_logs(0)
# for event in log_events:
# for line in event.log_lines:
# service = self._path_to_name[event.path]
# line = ANSI_ESCAPES.sub('', line.decode('utf8', errors='replace'))
# self._visible_lines.append(f'[{service}] {line}')
# self._visible_lines = self._visible_lines[-(self.height - 2):]
#
# content = (
# # Disable screen wrap.
# '\x1b[?7l' +
# # Title
# title + '\n'
# # Re-enable screen wrap.
# '\x1b[?7h'
# ) + _drawn_box(width - 1, self.height, self._visible_lines)
#
# self._prev_width = width
#
# return content
#
# def stop_tailing(self, name: str) -> None:
# self._tailer.stop_tailing(self._name_to_path[name])
#
# def cleanup(self) -> None:
# self._tailer.cleanup()
#
# Path: pgctl/log_viewer.py
# class Tailer:
#
# def __init__(self, paths: typing.Iterable[str]) -> None:
# self._poll = select.poll()
# self._path_to_tail = {}
# self._fdno_to_path = {}
#
# for path in paths:
# self._path_to_tail[path] = proc = subprocess.Popen(
# ('tail', '-F', path),
# stdout=subprocess.PIPE,
# stderr=subprocess.DEVNULL,
# )
# self._fdno_to_path[proc.stdout.fileno()] = path
# self._poll.register(proc.stdout, select.POLLIN)
#
# # Put stdout in non-blocking mode so we can read from it without blocking.
# flags = fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK
# fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, flags)
#
# def get_logs(self, timeout: typing.Optional[float] = 0) -> typing.List[TailEvent]:
# fd_events = self._poll.poll(timeout)
# ret = []
# for fd, event in fd_events:
# content = b''
# while True:
# try:
# content += os.read(fd, 10000)
# except BlockingIOError:
# break
# ret.append(TailEvent(self._fdno_to_path[fd], content.splitlines()))
# return ret
#
# def new_lines_available(self) -> bool:
# return len(self._poll.poll(0)) > 0
#
# def stop_tailing(self, path: str) -> None:
# proc = self._path_to_tail[path]
# self._poll.unregister(proc.stdout)
# del self._fdno_to_path[proc.stdout.fileno()]
# del self._path_to_tail[path]
# proc.terminate()
# proc.communicate()
#
# def cleanup(self) -> None:
# for path in tuple(self._path_to_tail):
# self.stop_tailing(path)
#
# Path: pgctl/log_viewer.py
# class TailEvent(typing.NamedTuple):
# path: str
# log_lines: typing.Tuple[str]
. Output only the next line. | file_b.flush() |
Next line prediction: <|code_start|>
def test_tailer(tmp_path):
file_a = (tmp_path / 'a').open('a+')
file_b = (tmp_path / 'b').open('a+')
# At the start there should be no lines.
tailer = Tailer((file_a.name, file_b.name))
assert tailer.new_lines_available() is False
assert tailer.get_logs() == []
# It should pick up changes to a single file.
file_a.write('A\n')
file_a.flush()
wait_for(lambda: tailer.new_lines_available() is True)
assert tailer.get_logs() == [TailEvent(file_a.name, [b'A'])]
assert tailer.new_lines_available() is False
assert tailer.get_logs() == []
# It should pick up changes to multiple files.
file_a.write('A\nA\n')
file_a.flush()
file_b.write('B\n')
file_b.flush()
wait_for(lambda: len(tailer._poll.poll()) == 2)
assert tailer.new_lines_available() is True
assert sorted(tailer.get_logs()) == [
TailEvent(file_a.name, [b'A', b'A']),
TailEvent(file_b.name, [b'B']),
<|code_end|>
. Use current file imports:
(import os
import shutil
import pytest
from unittest import mock
from testing.assertions import wait_for
from pgctl.log_viewer import LogViewer
from pgctl.log_viewer import Tailer
from pgctl.log_viewer import TailEvent)
and context including class names, function names, or small code snippets from other files:
# Path: pgctl/log_viewer.py
# class LogViewer:
#
# def __init__(self, height: int, name_to_path: typing.Dict[str, str]):
# self._tailer = Tailer(name_to_path.values())
# self._prev_width = None
# self._visible_lines = []
# self._name_to_path = name_to_path
# self._path_to_name = {path: name for name, path in name_to_path.items()}
# self.height = height
#
# def move_cursor_to_top(self) -> str:
# if self._prev_width is not None:
# return f'\x1b[{self.height + 1}F'
# else:
# return ''
#
# def _terminal_width(self) -> int:
# columns = shutil.get_terminal_size((80, 20)).columns
# if columns <= 5:
# # This happens a lot with pty spawning (usually with 0x0 size).
# # Just default to something reasonable.
# return 80
# else:
# return columns
#
# def redraw_needed(self) -> bool:
# return self._tailer.new_lines_available() or self._prev_width != self._terminal_width()
#
# def clear_below(self) -> str:
# return '\x1b[0J'
#
# def draw_logs(self, title: str) -> str:
# width = self._terminal_width()
#
# log_events = self._tailer.get_logs(0)
# for event in log_events:
# for line in event.log_lines:
# service = self._path_to_name[event.path]
# line = ANSI_ESCAPES.sub('', line.decode('utf8', errors='replace'))
# self._visible_lines.append(f'[{service}] {line}')
# self._visible_lines = self._visible_lines[-(self.height - 2):]
#
# content = (
# # Disable screen wrap.
# '\x1b[?7l' +
# # Title
# title + '\n'
# # Re-enable screen wrap.
# '\x1b[?7h'
# ) + _drawn_box(width - 1, self.height, self._visible_lines)
#
# self._prev_width = width
#
# return content
#
# def stop_tailing(self, name: str) -> None:
# self._tailer.stop_tailing(self._name_to_path[name])
#
# def cleanup(self) -> None:
# self._tailer.cleanup()
#
# Path: pgctl/log_viewer.py
# class Tailer:
#
# def __init__(self, paths: typing.Iterable[str]) -> None:
# self._poll = select.poll()
# self._path_to_tail = {}
# self._fdno_to_path = {}
#
# for path in paths:
# self._path_to_tail[path] = proc = subprocess.Popen(
# ('tail', '-F', path),
# stdout=subprocess.PIPE,
# stderr=subprocess.DEVNULL,
# )
# self._fdno_to_path[proc.stdout.fileno()] = path
# self._poll.register(proc.stdout, select.POLLIN)
#
# # Put stdout in non-blocking mode so we can read from it without blocking.
# flags = fcntl.fcntl(proc.stdout.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK
# fcntl.fcntl(proc.stdout.fileno(), fcntl.F_SETFL, flags)
#
# def get_logs(self, timeout: typing.Optional[float] = 0) -> typing.List[TailEvent]:
# fd_events = self._poll.poll(timeout)
# ret = []
# for fd, event in fd_events:
# content = b''
# while True:
# try:
# content += os.read(fd, 10000)
# except BlockingIOError:
# break
# ret.append(TailEvent(self._fdno_to_path[fd], content.splitlines()))
# return ret
#
# def new_lines_available(self) -> bool:
# return len(self._poll.poll(0)) > 0
#
# def stop_tailing(self, path: str) -> None:
# proc = self._path_to_tail[path]
# self._poll.unregister(proc.stdout)
# del self._fdno_to_path[proc.stdout.fileno()]
# del self._path_to_tail[path]
# proc.terminate()
# proc.communicate()
#
# def cleanup(self) -> None:
# for path in tuple(self._path_to_tail):
# self.stop_tailing(path)
#
# Path: pgctl/log_viewer.py
# class TailEvent(typing.NamedTuple):
# path: str
# log_lines: typing.Tuple[str]
. Output only the next line. | ] |
Given snippet: <|code_start|> def startElement(self, name, attrs):
"""map element stream to ET elements as they occur"""
# first level, stream starts
if self.depth == 0:
if name != "stream:stream":
raise BadFormatError
self.streamhandler(attrs)
self.depth = 1
# second level creates element tree
else:
self.treebuilder.start(name, self.makedictfromattrs(attrs))
self.depth += 1
def endElement(self, name):
if self.depth == 1:
self.streamhandler({})
self.depth = 0
elif self.depth >= 2:
self.treebuilder.end(name)
self.depth -= 1
if self.depth == 1:
tree = self.treebuilder.close()
self.contenthandler(tree)
def characters(self, content):
self.treebuilder.data(content)
def makedictfromattrs(self, attrs):
"""Attributes from sax are not dictionaries. ElementTree doesn't
copy automatically, so do it here and convert to ordered dict."""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from collections import OrderedDict
from sqlalchemy.util import OrderedDict
from xml.sax import make_parser as sax_make_parser, SAXParseException
from xml.sax.handler import ContentHandler
from pyfire.logger import Logger
from pyfire.stream.errors import BadFormatError, InvalidXMLError
import xml.etree.ElementTree as ET
and context:
# Path: pyfire/logger.py
# class Logger(logbook.Logger):
# def __init__(self, name):
# classname = name.replace('.', '_').lower()
# if classname.startswith("pyfire_"):
# classname = classname[7:]
#
# try:
# level = config.get('logging', classname).upper()
# except config.NoOptionError:
# level = ''
#
# if not level:
# level = config.get('logging', 'global_level').upper()
#
# if level not in frozenset(['CRITICAL', 'ERROR', 'WARNING',
# 'INFO', 'DEBUG', 'NOTSET']):
# warnings.warn("No such loglevel %s" % level, RuntimeWarning)
# level = 'ERROR'
# super(Logger, self).__init__(classname, getattr(logbook, level))
# self.handlers.append(logbook.more.ColorizedStderrHandler())
# self._disabled = False
#
# def _set_disabled(self, value):
# self._disabled = value
#
# def _get_disabled(self):
# return global_disable or self._disabled
#
# disabled = property(_get_disabled, _set_disabled)
#
# Path: pyfire/stream/errors.py
# class BadFormatError(StreamError):
# """XML Format error"""
#
# def __init__(self):
# StreamError.__init__(self, "bad-format")
#
# class InvalidXMLError(StreamError):
# """Stream contains XML data that cannot be processed."""
#
# def __init__(self):
# StreamError.__init__(self, "invalid-xml")
which might include code, classes, or functions. Output only the next line. | retdict = OrderedDict(attrs.items()) |
Next line prediction: <|code_start|> if self.depth == 0:
if name != "stream:stream":
raise BadFormatError
self.streamhandler(attrs)
self.depth = 1
# second level creates element tree
else:
self.treebuilder.start(name, self.makedictfromattrs(attrs))
self.depth += 1
def endElement(self, name):
if self.depth == 1:
self.streamhandler({})
self.depth = 0
elif self.depth >= 2:
self.treebuilder.end(name)
self.depth -= 1
if self.depth == 1:
tree = self.treebuilder.close()
self.contenthandler(tree)
def characters(self, content):
self.treebuilder.data(content)
def makedictfromattrs(self, attrs):
"""Attributes from sax are not dictionaries. ElementTree doesn't
copy automatically, so do it here and convert to ordered dict."""
retdict = OrderedDict(attrs.items())
for k, v in attrs.items():
retdict[k] = v
<|code_end|>
. Use current file imports:
( from collections import OrderedDict
from sqlalchemy.util import OrderedDict
from xml.sax import make_parser as sax_make_parser, SAXParseException
from xml.sax.handler import ContentHandler
from pyfire.logger import Logger
from pyfire.stream.errors import BadFormatError, InvalidXMLError
import xml.etree.ElementTree as ET)
and context including class names, function names, or small code snippets from other files:
# Path: pyfire/logger.py
# class Logger(logbook.Logger):
# def __init__(self, name):
# classname = name.replace('.', '_').lower()
# if classname.startswith("pyfire_"):
# classname = classname[7:]
#
# try:
# level = config.get('logging', classname).upper()
# except config.NoOptionError:
# level = ''
#
# if not level:
# level = config.get('logging', 'global_level').upper()
#
# if level not in frozenset(['CRITICAL', 'ERROR', 'WARNING',
# 'INFO', 'DEBUG', 'NOTSET']):
# warnings.warn("No such loglevel %s" % level, RuntimeWarning)
# level = 'ERROR'
# super(Logger, self).__init__(classname, getattr(logbook, level))
# self.handlers.append(logbook.more.ColorizedStderrHandler())
# self._disabled = False
#
# def _set_disabled(self, value):
# self._disabled = value
#
# def _get_disabled(self):
# return global_disable or self._disabled
#
# disabled = property(_get_disabled, _set_disabled)
#
# Path: pyfire/stream/errors.py
# class BadFormatError(StreamError):
# """XML Format error"""
#
# def __init__(self):
# StreamError.__init__(self, "bad-format")
#
# class InvalidXMLError(StreamError):
# """Stream contains XML data that cannot be processed."""
#
# def __init__(self):
# StreamError.__init__(self, "invalid-xml")
. Output only the next line. | return retdict |
Continue the code snippet: <|code_start|> """map element stream to ET elements as they occur"""
# first level, stream starts
if self.depth == 0:
if name != "stream:stream":
raise BadFormatError
self.streamhandler(attrs)
self.depth = 1
# second level creates element tree
else:
self.treebuilder.start(name, self.makedictfromattrs(attrs))
self.depth += 1
def endElement(self, name):
if self.depth == 1:
self.streamhandler({})
self.depth = 0
elif self.depth >= 2:
self.treebuilder.end(name)
self.depth -= 1
if self.depth == 1:
tree = self.treebuilder.close()
self.contenthandler(tree)
def characters(self, content):
self.treebuilder.data(content)
def makedictfromattrs(self, attrs):
"""Attributes from sax are not dictionaries. ElementTree doesn't
copy automatically, so do it here and convert to ordered dict."""
retdict = OrderedDict(attrs.items())
<|code_end|>
. Use current file imports:
from collections import OrderedDict
from sqlalchemy.util import OrderedDict
from xml.sax import make_parser as sax_make_parser, SAXParseException
from xml.sax.handler import ContentHandler
from pyfire.logger import Logger
from pyfire.stream.errors import BadFormatError, InvalidXMLError
import xml.etree.ElementTree as ET
and context (classes, functions, or code) from other files:
# Path: pyfire/logger.py
# class Logger(logbook.Logger):
# def __init__(self, name):
# classname = name.replace('.', '_').lower()
# if classname.startswith("pyfire_"):
# classname = classname[7:]
#
# try:
# level = config.get('logging', classname).upper()
# except config.NoOptionError:
# level = ''
#
# if not level:
# level = config.get('logging', 'global_level').upper()
#
# if level not in frozenset(['CRITICAL', 'ERROR', 'WARNING',
# 'INFO', 'DEBUG', 'NOTSET']):
# warnings.warn("No such loglevel %s" % level, RuntimeWarning)
# level = 'ERROR'
# super(Logger, self).__init__(classname, getattr(logbook, level))
# self.handlers.append(logbook.more.ColorizedStderrHandler())
# self._disabled = False
#
# def _set_disabled(self, value):
# self._disabled = value
#
# def _get_disabled(self):
# return global_disable or self._disabled
#
# disabled = property(_get_disabled, _set_disabled)
#
# Path: pyfire/stream/errors.py
# class BadFormatError(StreamError):
# """XML Format error"""
#
# def __init__(self):
# StreamError.__init__(self, "bad-format")
#
# class InvalidXMLError(StreamError):
# """Stream contains XML data that cannot be processed."""
#
# def __init__(self):
# StreamError.__init__(self, "invalid-xml")
. Output only the next line. | for k, v in attrs.items(): |
Next line prediction: <|code_start|>
class UserTests(TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.m_session.get.return_value = get_fake_response(data={'id': 'foo'})
self.user = user.User(self.m_session)
def test_id_is_foo(self):
self.assertEqual(self.user.me['id'], 'foo')
@mock.patch('groupy.api.user.blocks')
def test_blocks_uses_id(self, m_blocks):
self.user.blocks
(__, id_), __ = m_blocks.Blocks.call_args
self.assertEqual(id_, 'foo')
def test_update(self):
data = {'bar': 'foo'}
self.m_session.post.return_value = get_fake_response(data=data)
result = self.user.update(foo='bar')
self.assertEqual(result, data)
class SmsModeTests(TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.sms_mode = user.SmsMode(self.m_session)
<|code_end|>
. Use current file imports:
(from unittest import mock
from groupy.api import user
from .base import get_fake_response
from .base import TestCase)
and context including class names, function names, or small code snippets from other files:
# Path: groupy/api/user.py
# class User(base.Manager):
# class SmsMode(base.Manager):
# def __init__(self, session):
# def blocks(self):
# def me(self):
# def get_me(self):
# def update(self, **params):
# def __init__(self, session):
# def enable(self, duration, registration_id=None):
# def disable(self):
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | self.m_session.post.return_value = mock.Mock(ok=True) |
Based on the snippet: <|code_start|>class UserTests(TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.m_session.get.return_value = get_fake_response(data={'id': 'foo'})
self.user = user.User(self.m_session)
def test_id_is_foo(self):
self.assertEqual(self.user.me['id'], 'foo')
@mock.patch('groupy.api.user.blocks')
def test_blocks_uses_id(self, m_blocks):
self.user.blocks
(__, id_), __ = m_blocks.Blocks.call_args
self.assertEqual(id_, 'foo')
def test_update(self):
data = {'bar': 'foo'}
self.m_session.post.return_value = get_fake_response(data=data)
result = self.user.update(foo='bar')
self.assertEqual(result, data)
class SmsModeTests(TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.sms_mode = user.SmsMode(self.m_session)
self.m_session.post.return_value = mock.Mock(ok=True)
class EnableSmsModeTests(SmsModeTests):
<|code_end|>
, predict the immediate next line with the help of imports:
from unittest import mock
from groupy.api import user
from .base import get_fake_response
from .base import TestCase
and context (classes, functions, sometimes code) from other files:
# Path: groupy/api/user.py
# class User(base.Manager):
# class SmsMode(base.Manager):
# def __init__(self, session):
# def blocks(self):
# def me(self):
# def get_me(self):
# def update(self, **params):
# def __init__(self, session):
# def enable(self, duration, registration_id=None):
# def disable(self):
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | def setUp(self): |
Based on the snippet: <|code_start|>
class UserTests(TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.m_session.get.return_value = get_fake_response(data={'id': 'foo'})
self.user = user.User(self.m_session)
def test_id_is_foo(self):
self.assertEqual(self.user.me['id'], 'foo')
@mock.patch('groupy.api.user.blocks')
def test_blocks_uses_id(self, m_blocks):
self.user.blocks
<|code_end|>
, predict the immediate next line with the help of imports:
from unittest import mock
from groupy.api import user
from .base import get_fake_response
from .base import TestCase
and context (classes, functions, sometimes code) from other files:
# Path: groupy/api/user.py
# class User(base.Manager):
# class SmsMode(base.Manager):
# def __init__(self, session):
# def blocks(self):
# def me(self):
# def get_me(self):
# def update(self, **params):
# def __init__(self, session):
# def enable(self, duration, registration_id=None):
# def disable(self):
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | (__, id_), __ = m_blocks.Blocks.call_args |
Using the snippet: <|code_start|>
class ChatsTests(unittest.TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.chats = chats.Chats(self.m_session)
class ListChatsTests(ChatsTests):
def setUp(self):
super().setUp()
m_chat = {
'other_user': {'id': 42},
'created_at': 123457890,
'updated_at': 123457891,
}
self.m_session.get.return_value = mock.Mock(data=[m_chat])
self.results = self.chats.list()
def test_results_contains_chats(self):
self.assertTrue(all(isinstance(c, chats.Chat) for c in self.results))
def test_results_is_a_ChatList(self):
self.assertTrue(isinstance(self.results, pagers.ChatList))
class ChatTests(unittest.TestCase):
def setUp(self):
self.m_manager = mock.Mock()
m_chat = {
'other_user': {'id': 42, 'name': 'foo'},
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from unittest import mock
from groupy import pagers
from groupy.api import chats
and context (class names, function names, or code) available:
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/chats.py
# class Chats(base.Manager):
# class Chat(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10):
# def list_all(self, per_page=10):
# def __init__(self, manager, **data):
# def __repr__(self):
# def post(self, text=None, attachments=None):
. Output only the next line. | 'created_at': 123457890, |
Here is a snippet: <|code_start|>
class ChatsTests(unittest.TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.chats = chats.Chats(self.m_session)
class ListChatsTests(ChatsTests):
def setUp(self):
super().setUp()
m_chat = {
<|code_end|>
. Write the next line using the current file imports:
import unittest
from unittest import mock
from groupy import pagers
from groupy.api import chats
and context from other files:
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/chats.py
# class Chats(base.Manager):
# class Chat(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10):
# def list_all(self, per_page=10):
# def __init__(self, manager, **data):
# def __repr__(self):
# def post(self, text=None, attachments=None):
, which may include functions, classes, or code. Output only the next line. | 'other_user': {'id': 42}, |
Given the code snippet: <|code_start|>
class GroupPostTests(GroupTests):
def setUp(self):
super().setUp()
self.group.messages = mock.Mock()
self.result = self.group.post(text='foo')
def test_messages_used(self):
self.assertTrue(self.group.messages.create.called)
class GroupUpdateTests(GroupTests):
def setUp(self):
super().setUp()
self.group.update(name='foowho')
def test_manager_used(self):
self.assertTrue(self.group.manager.update.called)
class GroupDestroyTests(GroupTests):
def setUp(self):
super().setUp()
self.group.destroy()
def test_manager_used(self):
self.assertTrue(self.group.manager.destroy.called)
class GroupRejoinTests(GroupTests):
<|code_end|>
, generate the next line using the imports in this file:
from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups
and context (functions, classes, or occasionally code) from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
. Output only the next line. | def setUp(self): |
Given the code snippet: <|code_start|> self.assertEqual(self.group, group)
def test_different_group_id(self):
group = groups.Group(mock.Mock(), **get_fake_group_data())
group.group_id = 2 * self.group.group_id
self.assertNotEqual(self.group, group)
class GroupReprTests(GroupTests):
def test_repr(self):
representation = repr(self.group)
self.assertEqual(representation, "<Group(name='foobar')>")
class GroupPostTests(GroupTests):
def setUp(self):
super().setUp()
self.group.messages = mock.Mock()
self.result = self.group.post(text='foo')
def test_messages_used(self):
self.assertTrue(self.group.messages.create.called)
class GroupUpdateTests(GroupTests):
def setUp(self):
super().setUp()
self.group.update(name='foowho')
def test_manager_used(self):
<|code_end|>
, generate the next line using the imports in this file:
from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups
and context (functions, classes, or occasionally code) from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
. Output only the next line. | self.assertTrue(self.group.manager.update.called) |
Based on the snippet: <|code_start|>
class GroupTests(TestCase):
def setUp(self):
self.group = groups.Group(mock.Mock(), **get_fake_group_data())
class GroupEqualityTests(GroupTests):
def test_same_group_id(self):
group = groups.Group(mock.Mock(), **get_fake_group_data())
self.assertEqual(self.group, group)
def test_different_group_id(self):
group = groups.Group(mock.Mock(), **get_fake_group_data())
group.group_id = 2 * self.group.group_id
self.assertNotEqual(self.group, group)
class GroupReprTests(GroupTests):
def test_repr(self):
representation = repr(self.group)
self.assertEqual(representation, "<Group(name='foobar')>")
class GroupPostTests(GroupTests):
def setUp(self):
super().setUp()
self.group.messages = mock.Mock()
self.result = self.group.post(text='foo')
def test_messages_used(self):
<|code_end|>
, predict the immediate next line with the help of imports:
from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups
and context (classes, functions, sometimes code) from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
. Output only the next line. | self.assertTrue(self.group.messages.create.called) |
Next line prediction: <|code_start|> def setUp(self):
super().setUp()
self.group.rejoin()
def test_manager_used(self):
self.assertTrue(self.group.manager.rejoin.called)
class GroupRefreshFromServerTests(GroupTests):
def setUp(self):
super().setUp()
self.members = [get_fake_member_data(), get_fake_member_data()]
refreshed_group = get_fake_group_data(name='qux', members=self.members)
self.group.manager.get.return_value = get_fake_response(data=refreshed_group)
self.group.refresh_from_server()
def test_manager_used(self):
self.assertTrue(self.group.manager.get.called)
def test_name_is_updated(self):
self.assertEqual(self.group.name, 'qux')
def test_members_is_updated(self):
self.assertEqual(len(self.group.members), len(self.members))
class UnsuccessfulChangeOwnersResultTests(TestCase):
known_codes = '400', '403', '404', '405'
def test_is_not_success(self):
<|code_end|>
. Use current file imports:
(from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups)
and context including class names, function names, or small code snippets from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
. Output only the next line. | for code in self.known_codes: |
Predict the next line for this snippet: <|code_start|> self.assertTrue(all(isinstance(g, groups.Group) for g in self.results))
def test_results_is_a_list(self):
self.assertTrue(isinstance(self.results, list))
class ListCurrentGroupsTests(GroupsTests):
def setUp(self):
super().setUp()
group = get_fake_group_data()
response = get_fake_response(data=[group])
self.m_session.get.return_value = response
self.results = self.groups.list()
def test_results_is_a_GroupList(self):
self.assertTrue(isinstance(self.results, pagers.GroupList))
class GetGroupTests(GroupsTests):
def setUp(self):
super().setUp()
group = get_fake_group_data()
response = get_fake_response(data=group)
self.m_session.get.return_value = response
self.result = self.groups.get('foo')
def test_result_is_group(self):
self.assertTrue(isinstance(self.result, groups.Group))
<|code_end|>
with the help of current file imports:
from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups
and context from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
, which may contain function names, class names, or code. Output only the next line. | class CreateGroupTests(GroupsTests): |
Next line prediction: <|code_start|> self.group.messages = mock.Mock()
self.result = self.group.post(text='foo')
def test_messages_used(self):
self.assertTrue(self.group.messages.create.called)
class GroupUpdateTests(GroupTests):
def setUp(self):
super().setUp()
self.group.update(name='foowho')
def test_manager_used(self):
self.assertTrue(self.group.manager.update.called)
class GroupDestroyTests(GroupTests):
def setUp(self):
super().setUp()
self.group.destroy()
def test_manager_used(self):
self.assertTrue(self.group.manager.destroy.called)
class GroupRejoinTests(GroupTests):
def setUp(self):
super().setUp()
self.group.rejoin()
<|code_end|>
. Use current file imports:
(from unittest import mock
from .base import get_fake_response, get_fake_member_data, get_fake_group_data
from .base import TestCase
from groupy import pagers
from groupy.api import groups)
and context including class names, function names, or small code snippets from other files:
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# def get_fake_group_data(**kwargs):
# group_data = {
# 'id': 'foo',
# 'name': 'foobar',
# 'group_id': 'bar',
# 'created_at': 1302623328,
# 'updated_at': 1302623329,
# 'office_mode': False,
# }
# group_data.update(kwargs)
# return group_data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
#
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
#
# Path: groupy/api/groups.py
# class Groups(base.Manager):
# class ChangeOwnersResult:
# class Group(base.ManagedResource):
# def __init__(self, session):
# def _raw_list(self, **params):
# def list(self, page=1, per_page=10, omit=None):
# def list_all(self, per_page=10, omit=None):
# def list_former(self):
# def get(self, id):
# def create(self, name, description=None, image_url=None, share=None, **kwargs):
# def update(self, id, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self, id):
# def join(self, group_id, share_token):
# def rejoin(self, group_id):
# def change_owners(self, group_id, owner_id):
# def __init__(self, group_id, owner_id, status):
# def is_success(self):
# def __bool__(self):
# def __init__(self, manager, **data):
# def __repr__(self):
# def __eq__(self, other):
# def is_mine(self):
# def post(self, text=None, attachments=None):
# def update(self, name=None, description=None, image_url=None,
# office_mode=None, share=None, **kwargs):
# def destroy(self):
# def rejoin(self):
# def refresh_from_server(self):
# def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None,
# **kwargs):
# def change_owners(self, user_id):
# def get_membership(self):
# def update_membership(self, nickname=None, **kwargs):
# def leave(self):
. Output only the next line. | def test_manager_used(self): |
Given snippet: <|code_start|> __, kwargs = self.m_session.post.call_args
message = kwargs['json']['message']
self.assertEqual(message['text'], 'qux')
def test_payload_lacks_attachments(self):
__, kwargs = self.m_session.post.call_args
message = kwargs['json']['message']
self.assertNotIn('attachments', message)
class CreateAttachmentMessagesTests(MessagesTests):
def setUp(self):
super().setUp()
message = base.get_fake_message_data()
response = base.get_fake_response(data={'message': message})
self.m_session.post.return_value = response
m_attachment = mock.Mock()
m_attachment.to_json.return_value = {'qux': 'quux'}
self.result = self.messages.create(attachments=[m_attachment])
def test_result_is_message(self):
self.assertTrue(isinstance(self.result, messages.Message))
def test_payload_contains_attachments(self):
__, kwargs = self.m_session.post.call_args
message = kwargs['json']['message']
self.assertEqual(message['attachments'], [{'qux': 'quux'}])
def test_payload_lacks_text(self):
__, kwargs = self.m_session.post.call_args
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from unittest import mock
from datetime import datetime
from groupy import utils
from groupy.api import attachments
from groupy.api import messages
from . import base
and context:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
#
# Path: groupy/api/attachments.py
# class AttachmentMeta(type):
# class Attachment(base.Resource, metaclass=AttachmentMeta):
# class Location(Attachment):
# class Split(Attachment):
# class Emoji(Attachment):
# class Mentions(Attachment):
# class Image(Attachment):
# class LinkedImage(Image):
# class Images(base.Manager):
# def __init__(cls, name, bases, attrs):
# def __init__(self, type, **data):
# def to_json(self):
# def from_data(cls, type, **data):
# def from_bulk_data(cls, attachments):
# def __init__(self, lat, lng, name, foursqure_venue_id=None):
# def __init__(self, token):
# def __init__(self, placeholder, charmap):
# def __init__(self, loci=None, user_ids=None):
# def __init__(self, url, source_url=None, file_id=None):
# def from_file(self, fp):
# def upload(self, fp):
# def download(self, image, url_field='url', suffix=None):
# def download_preview(self, image, url_field='url'):
# def download_large(self, image, url_field='url'):
# def download_avatar(self, image, url_field='url'):
#
# Path: groupy/api/messages.py
# class Messages(base.Manager):
# class DirectMessages(base.Manager):
# class GenericMessage(base.ManagedResource):
# class Message(GenericMessage):
# class DirectMessage(GenericMessage):
# class Leaderboard(base.Manager):
# class Likes(base.Manager):
# class Gallery(base.Manager):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, after_id=None, limit=20):
# def list_before(self, message_id, limit=None):
# def list_since(self, message_id, limit=None):
# def list_after(self, message_id, limit=None):
# def list_all(self, limit=None):
# def list_all_before(self, message_id, limit=None):
# def list_all_after(self, message_id, limit=None):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, session, other_user_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, **kwargs):
# def list_before(self, message_id, **kwargs):
# def list_since(self, message_id, **kwargs):
# def list_all(self, before_id=None, since_id=None, **kwargs):
# def list_all_before(self, message_id, **kwargs):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, manager, conversation_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def like(self):
# def unlike(self):
# def __init__(self, manager, **data):
# def __init__(self, manager, **data):
# def get_conversation_id(data):
# def __init__(self, session, group_id):
# def _get_messages(self, path=None, **params):
# def list(self, period):
# def list_day(self):
# def list_week(self):
# def list_month(self):
# def list_mine(self):
# def list_for_me(self):
# def __init__(self, session, conversation_id, message_id):
# def like(self):
# def unlike(self):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def _convert_to_rfc3339(self, when=None):
# def list(self, before=None, since=None, after=None, limit=100):
# def list_before(self, when, limit=100):
# def list_since(self, when, limit=100):
# def list_after(self, when, limit=100):
# def list_all(self, **params):
# def list_all_before(self, when, limit=100):
# def list_all_after(self, when, limit=100):
which might include code, classes, or functions. Output only the next line. | message = kwargs['json']['message'] |
Given the code snippet: <|code_start|>
class AttachmentTests(base.TestCase):
def setUp(self):
self.m_manager = mock.Mock()
class AttachmentToJsonTests(AttachmentTests):
def test_json_is_correct(self):
a = messages.Attachment(type='foo', text='bar')
self.assertEqual(a.to_json(), {'type': 'foo', 'text': 'bar'})
class AttachmentsFromBulkDataTests(AttachmentTests):
def setUp(self):
super().setUp()
self.data = [
{'type': 'unknown', 'foo': 'bar'},
{'type': 'location', 'lat': 4, 'lng': 2, 'name': 'baz'},
]
self.attachments = attachments.Attachment.from_bulk_data(self.data)
def test_attachment_one_is_attachment(self):
self.assertEqual(type(self.attachments[0]), attachments.Attachment)
def test_attachment_two_is_location(self):
self.assertIsInstance(self.attachments[1], attachments.Location)
class LeaderboardTests(base.TestCase):
def setUp(self):
<|code_end|>
, generate the next line using the imports in this file:
from unittest import mock
from datetime import datetime
from groupy import utils
from groupy.api import attachments
from groupy.api import messages
from . import base
and context (functions, classes, or occasionally code) from other files:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
#
# Path: groupy/api/attachments.py
# class AttachmentMeta(type):
# class Attachment(base.Resource, metaclass=AttachmentMeta):
# class Location(Attachment):
# class Split(Attachment):
# class Emoji(Attachment):
# class Mentions(Attachment):
# class Image(Attachment):
# class LinkedImage(Image):
# class Images(base.Manager):
# def __init__(cls, name, bases, attrs):
# def __init__(self, type, **data):
# def to_json(self):
# def from_data(cls, type, **data):
# def from_bulk_data(cls, attachments):
# def __init__(self, lat, lng, name, foursqure_venue_id=None):
# def __init__(self, token):
# def __init__(self, placeholder, charmap):
# def __init__(self, loci=None, user_ids=None):
# def __init__(self, url, source_url=None, file_id=None):
# def from_file(self, fp):
# def upload(self, fp):
# def download(self, image, url_field='url', suffix=None):
# def download_preview(self, image, url_field='url'):
# def download_large(self, image, url_field='url'):
# def download_avatar(self, image, url_field='url'):
#
# Path: groupy/api/messages.py
# class Messages(base.Manager):
# class DirectMessages(base.Manager):
# class GenericMessage(base.ManagedResource):
# class Message(GenericMessage):
# class DirectMessage(GenericMessage):
# class Leaderboard(base.Manager):
# class Likes(base.Manager):
# class Gallery(base.Manager):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, after_id=None, limit=20):
# def list_before(self, message_id, limit=None):
# def list_since(self, message_id, limit=None):
# def list_after(self, message_id, limit=None):
# def list_all(self, limit=None):
# def list_all_before(self, message_id, limit=None):
# def list_all_after(self, message_id, limit=None):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, session, other_user_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, **kwargs):
# def list_before(self, message_id, **kwargs):
# def list_since(self, message_id, **kwargs):
# def list_all(self, before_id=None, since_id=None, **kwargs):
# def list_all_before(self, message_id, **kwargs):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, manager, conversation_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def like(self):
# def unlike(self):
# def __init__(self, manager, **data):
# def __init__(self, manager, **data):
# def get_conversation_id(data):
# def __init__(self, session, group_id):
# def _get_messages(self, path=None, **params):
# def list(self, period):
# def list_day(self):
# def list_week(self):
# def list_month(self):
# def list_mine(self):
# def list_for_me(self):
# def __init__(self, session, conversation_id, message_id):
# def like(self):
# def unlike(self):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def _convert_to_rfc3339(self, when=None):
# def list(self, before=None, since=None, after=None, limit=100):
# def list_before(self, when, limit=100):
# def list_since(self, when, limit=100):
# def list_after(self, when, limit=100):
# def list_all(self, **params):
# def list_all_before(self, when, limit=100):
# def list_all_after(self, when, limit=100):
. Output only the next line. | self.m_session = mock.Mock() |
Continue the code snippet: <|code_start|>
class LikeGenericMessageTests(GenericMessageTests):
def setUp(self):
super().setUp()
self.message._likes = mock.Mock()
def test_like_uses_likes(self):
self.message.like()
self.assertTrue(self.message._likes.like.called)
def test_unlike_uses_likes(self):
self.message.unlike()
self.assertTrue(self.message._likes.unlike.called)
class GenericMessageReprTests(GenericMessageTests):
def test_repr(self):
representation = repr(self.message)
self.assertEqual(representation, "<GenericMessage(name='Alice', "
"text='corge', attachments=0)>")
class MessageTests(base.TestCase):
def setUp(self):
self.m_manager = mock.Mock()
data = base.get_fake_message_data()
self.message = messages.Message(self.m_manager, **data)
def test_conversation_id_is_group_id(self):
<|code_end|>
. Use current file imports:
from unittest import mock
from datetime import datetime
from groupy import utils
from groupy.api import attachments
from groupy.api import messages
from . import base
and context (classes, functions, or code) from other files:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
#
# Path: groupy/api/attachments.py
# class AttachmentMeta(type):
# class Attachment(base.Resource, metaclass=AttachmentMeta):
# class Location(Attachment):
# class Split(Attachment):
# class Emoji(Attachment):
# class Mentions(Attachment):
# class Image(Attachment):
# class LinkedImage(Image):
# class Images(base.Manager):
# def __init__(cls, name, bases, attrs):
# def __init__(self, type, **data):
# def to_json(self):
# def from_data(cls, type, **data):
# def from_bulk_data(cls, attachments):
# def __init__(self, lat, lng, name, foursqure_venue_id=None):
# def __init__(self, token):
# def __init__(self, placeholder, charmap):
# def __init__(self, loci=None, user_ids=None):
# def __init__(self, url, source_url=None, file_id=None):
# def from_file(self, fp):
# def upload(self, fp):
# def download(self, image, url_field='url', suffix=None):
# def download_preview(self, image, url_field='url'):
# def download_large(self, image, url_field='url'):
# def download_avatar(self, image, url_field='url'):
#
# Path: groupy/api/messages.py
# class Messages(base.Manager):
# class DirectMessages(base.Manager):
# class GenericMessage(base.ManagedResource):
# class Message(GenericMessage):
# class DirectMessage(GenericMessage):
# class Leaderboard(base.Manager):
# class Likes(base.Manager):
# class Gallery(base.Manager):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, after_id=None, limit=20):
# def list_before(self, message_id, limit=None):
# def list_since(self, message_id, limit=None):
# def list_after(self, message_id, limit=None):
# def list_all(self, limit=None):
# def list_all_before(self, message_id, limit=None):
# def list_all_after(self, message_id, limit=None):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, session, other_user_id):
# def _raw_list(self, **params):
# def list(self, before_id=None, since_id=None, **kwargs):
# def list_before(self, message_id, **kwargs):
# def list_since(self, message_id, **kwargs):
# def list_all(self, before_id=None, since_id=None, **kwargs):
# def list_all_before(self, message_id, **kwargs):
# def create(self, text=None, attachments=None, source_guid=None):
# def __init__(self, manager, conversation_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def like(self):
# def unlike(self):
# def __init__(self, manager, **data):
# def __init__(self, manager, **data):
# def get_conversation_id(data):
# def __init__(self, session, group_id):
# def _get_messages(self, path=None, **params):
# def list(self, period):
# def list_day(self):
# def list_week(self):
# def list_month(self):
# def list_mine(self):
# def list_for_me(self):
# def __init__(self, session, conversation_id, message_id):
# def like(self):
# def unlike(self):
# def __init__(self, session, group_id):
# def _raw_list(self, **params):
# def _convert_to_rfc3339(self, when=None):
# def list(self, before=None, since=None, after=None, limit=100):
# def list_before(self, when, limit=100):
# def list_since(self, when, limit=100):
# def list_after(self, when, limit=100):
# def list_all(self, **params):
# def list_all_before(self, when, limit=100):
# def list_all_after(self, when, limit=100):
. Output only the next line. | self.assertEqual(self.message.conversation_id, self.message.group_id) |
Given snippet: <|code_start|> def __init__(self, session):
super().__init__(session, 'users')
self._me = None
self._blocks = None
self.sms_mode = SmsMode(self.session)
@property
def blocks(self):
if self._blocks is None:
self._blocks = blocks.Blocks(self.session, self.me['id'])
return self._blocks
@property
def me(self):
if self._me is None:
self._me = self.get_me()
return self._me
def get_me(self):
url = utils.urljoin(self.url, 'me')
response = self.session.get(url)
return response.data
def update(self, **params):
url = utils.urljoin(self.url, 'update')
response = self.session.post(url, json=params)
return response.data
class SmsMode(base.Manager):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from . import base
from . import blocks
from groupy import utils
and context:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
which might include code, classes, or functions. Output only the next line. | def __init__(self, session): |
Using the snippet: <|code_start|>
class TestAttachmentsFromData(unittest.TestCase):
def test_known_attachment_type(self):
data = {'type': 'split', 'token': 'foo'}
attachment = attachments.Attachment.from_data(**data)
self.assertIsInstance(attachment, attachments.Split)
def test_unknown_attachment_type(self):
data = {'type': 'foo', 'bar': 'baz'}
attachment = attachments.Attachment.from_data(**data)
self.assertIsInstance(attachment, attachments.Attachment)
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from groupy.api import attachments
and context (class names, function names, or code) available:
# Path: groupy/api/attachments.py
# class AttachmentMeta(type):
# class Attachment(base.Resource, metaclass=AttachmentMeta):
# class Location(Attachment):
# class Split(Attachment):
# class Emoji(Attachment):
# class Mentions(Attachment):
# class Image(Attachment):
# class LinkedImage(Image):
# class Images(base.Manager):
# def __init__(cls, name, bases, attrs):
# def __init__(self, type, **data):
# def to_json(self):
# def from_data(cls, type, **data):
# def from_bulk_data(cls, attachments):
# def __init__(self, lat, lng, name, foursqure_venue_id=None):
# def __init__(self, token):
# def __init__(self, placeholder, charmap):
# def __init__(self, loci=None, user_ids=None):
# def __init__(self, url, source_url=None, file_id=None):
# def from_file(self, fp):
# def upload(self, fp):
# def download(self, image, url_field='url', suffix=None):
# def download_preview(self, image, url_field='url'):
# def download_large(self, image, url_field='url'):
# def download_avatar(self, image, url_field='url'):
. Output only the next line. | def test_known_attachment_type_with_unknown_field(self): |
Given the code snippet: <|code_start|>
class SessionTests(unittest.TestCase):
def setUp(self):
self.token = 'abc123'
self.session = session.Session(self.token)
self.url = 'https://example.com/foo'
@responses.activate
<|code_end|>
, generate the next line using the imports in this file:
import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse
and context (functions, classes, or occasionally code) from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
. Output only the next line. | def test_token_is_present_in_headers(self): |
Given the following code snippet before the placeholder: <|code_start|> self.url = 'https://example.com/foo'
@responses.activate
def test_token_is_present_in_headers(self):
responses.add(responses.GET, self.url)
self.session.get(self.url)
self.assertEqual(responses.calls[0].request.headers['x-access-token'],
self.token)
def test_content_type_is_json(self):
headers = self.session.headers or {}
self.assertIn('content-type', headers)
content_type = headers['content-type']
self.assertEqual(content_type.lower(), 'application/json')
@responses.activate
def test_bad_response(self):
responses.add(responses.GET, self.url, status=503)
with self.assertRaises(BadResponse):
self.session.get(self.url)
@responses.activate
def test_no_response(self):
responses.add(responses.GET, self.url,
body=requests.exceptions.ConnectionError())
with self.assertRaises(NoResponse):
self.session.get(self.url)
class RequestTests(unittest.TestCase):
<|code_end|>
, predict the next line using imports from the current file:
import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse
and context including class names, function names, and sometimes code from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
. Output only the next line. | @responses.activate |
Continue the code snippet: <|code_start|> def test_token_is_present_in_headers(self):
responses.add(responses.GET, self.url)
self.session.get(self.url)
self.assertEqual(responses.calls[0].request.headers['x-access-token'],
self.token)
def test_content_type_is_json(self):
headers = self.session.headers or {}
self.assertIn('content-type', headers)
content_type = headers['content-type']
self.assertEqual(content_type.lower(), 'application/json')
@responses.activate
def test_bad_response(self):
responses.add(responses.GET, self.url, status=503)
with self.assertRaises(BadResponse):
self.session.get(self.url)
@responses.activate
def test_no_response(self):
responses.add(responses.GET, self.url,
body=requests.exceptions.ConnectionError())
with self.assertRaises(NoResponse):
self.session.get(self.url)
class RequestTests(unittest.TestCase):
@responses.activate
def setUp(self):
self.session = session.Session('abc123')
<|code_end|>
. Use current file imports:
import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse
and context (classes, functions, or code) from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
. Output only the next line. | self.url = 'https://example.com/foo' |
Next line prediction: <|code_start|> self.session = session.Session(self.token)
self.url = 'https://example.com/foo'
@responses.activate
def test_token_is_present_in_headers(self):
responses.add(responses.GET, self.url)
self.session.get(self.url)
self.assertEqual(responses.calls[0].request.headers['x-access-token'],
self.token)
def test_content_type_is_json(self):
headers = self.session.headers or {}
self.assertIn('content-type', headers)
content_type = headers['content-type']
self.assertEqual(content_type.lower(), 'application/json')
@responses.activate
def test_bad_response(self):
responses.add(responses.GET, self.url, status=503)
with self.assertRaises(BadResponse):
self.session.get(self.url)
@responses.activate
def test_no_response(self):
responses.add(responses.GET, self.url,
body=requests.exceptions.ConnectionError())
with self.assertRaises(NoResponse):
self.session.get(self.url)
<|code_end|>
. Use current file imports:
(import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse)
and context including class names, function names, or small code snippets from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
. Output only the next line. | class RequestTests(unittest.TestCase): |
Predict the next line for this snippet: <|code_start|>
class SessionTests(unittest.TestCase):
def setUp(self):
self.token = 'abc123'
self.session = session.Session(self.token)
self.url = 'https://example.com/foo'
@responses.activate
<|code_end|>
with the help of current file imports:
import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse
and context from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
, which may contain function names, class names, or code. Output only the next line. | def test_token_is_present_in_headers(self): |
Given the following code snippet before the placeholder: <|code_start|>
class SessionTests(unittest.TestCase):
def setUp(self):
self.token = 'abc123'
self.session = session.Session(self.token)
self.url = 'https://example.com/foo'
@responses.activate
def test_token_is_present_in_headers(self):
responses.add(responses.GET, self.url)
self.session.get(self.url)
self.assertEqual(responses.calls[0].request.headers['x-access-token'],
self.token)
def test_content_type_is_json(self):
headers = self.session.headers or {}
<|code_end|>
, predict the next line using imports from the current file:
import unittest
import requests
import responses
from unittest import mock
from groupy import session
from groupy.exceptions import BadResponse
from groupy.exceptions import InvalidJsonError
from groupy.exceptions import MissingMetaError
from groupy.exceptions import MissingResponseError
from groupy.exceptions import NoResponse
and context including class names, function names, and sometimes code from other files:
# Path: groupy/session.py
# class Session(requests.Session):
# class Response:
# def __init__(self, token):
# def request(self, *args, **kwargs):
# def __init__(self, response):
# def __getattr__(self, attr):
# def data(self):
# def errors(self):
#
# Path: groupy/exceptions.py
# class BadResponse(ApiError):
# """Exception raised when the status code of the response was 400 or more.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# message = 'Got a bad response'
#
# def __init__(self, response, message=None):
# if message is None:
# message = self._extract_message(response)
# super().__init__(message=message)
# self.response = response
#
# def _extract_message(self, response):
# try:
# meta = response.json()['meta']
# code = meta.get('code', response.status_code)
# errors = ','.join(meta.get('errors', ['unknown']))
# except (ValueError, KeyError):
# return None
# return 'HTTP {code}: {errors}'.format(code=code, errors=errors)
#
# Path: groupy/exceptions.py
# class InvalidJsonError(BadResponse):
# """Exception raised for incomplete/invalid JSON in a response."""
#
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingMetaError(BadResponse):
# """Exception raised for a response that lacks meta data."""
#
# def __init__(self, response, message='The response contained no meta data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class MissingResponseError(BadResponse):
# """Exception raised for a response that lacks response data."""
#
# def __init__(self, response, message='The response contained no response data'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class NoResponse(ApiError):
# """Exception raised when the API server could not be reached.
#
# :param request: the original request that was made
# :type request: :class:`~requests.PreparedRequest`
# :param str message: a description of the exception
# """
#
# message = 'Could not get a response'
#
# def __init__(self, request, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.request = request
. Output only the next line. | self.assertIn('content-type', headers) |
Next line prediction: <|code_start|>
class BlocksTests(unittest.TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.user_id = 'foo'
self.blocks = blocks.Blocks(self.m_session, self.user_id)
class BlocksListTests(BlocksTests):
def setUp(self):
super().setUp()
data = {'blocks': [{'x': 'X'}, {'y': 'Y'}]}
self.m_session.get.return_value = mock.Mock(data=data)
self.results = self.blocks.list()
def test_result_is_blocks(self):
self.assertTrue(all(isinstance(b, blocks.Block) for b in self.results))
<|code_end|>
. Use current file imports:
(import unittest
from unittest import mock
from groupy.api import blocks)
and context including class names, function names, or small code snippets from other files:
# Path: groupy/api/blocks.py
# class Blocks(base.Manager):
# class Block(base.ManagedResource):
# def __init__(self, session, user_id):
# def list(self):
# def between(self, other_user_id):
# def block(self, other_user_id):
# def unblock(self, other_user_id):
# def __repr__(self):
# def __eq__(self, other):
# def exists(self):
# def unblock(self):
. Output only the next line. | def test_user_id_is_in_params(self): |
Using the snippet: <|code_start|>
class ListBotsTests(BotsTests):
def setUp(self):
super().setUp()
self.m_session.get.return_value = mock.Mock(data=[{'x': 'X'}])
self.results = self.bots.list()
def test_results_are_bots(self):
self.assertTrue(all(isinstance(b, bots.Bot) for b in self.results))
class CreateBotsTests(BotsTests):
def setUp(self):
super().setUp()
self.m_session.post.return_value = mock.Mock(data={'bot': {'x': 'X'}})
self.result = self.bots.create(name='foo', group_id='bar', baz='qux')
def test_result_is_bot(self):
self.assertTrue(isinstance(self.result, bots.Bot))
def test_name_is_in_payload(self):
__, kwargs = self.m_session.post.call_args
payload = kwargs['json']
self.assertEqual(payload['bot'].get('name'), 'foo')
def test_group_id_is_in_payload(self):
__, kwargs = self.m_session.post.call_args
payload = kwargs['json']
self.assertEqual(payload['bot'].get('group_id'), 'bar')
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from unittest import mock
from groupy.api import bots
and context (class names, function names, or code) available:
# Path: groupy/api/bots.py
# class Bots(base.Manager):
# class Bot(base.ManagedResource):
# def __init__(self, session):
# def list(self):
# def create(self, name, group_id, avatar_url=None, callback_url=None,
# dm_notification=None, **kwargs):
# def post(self, bot_id, text, attachments=None):
# def destroy(self, bot_id):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text, attachments=None):
# def destroy(self):
. Output only the next line. | def test_details_in_payload(self): |
Given the following code snippet before the placeholder: <|code_start|>
class MangerTests(unittest.TestCase):
def setUp(self):
self.manager = base.Manager(mock.Mock(), path='foo')
def test_url_contains_path(self):
self.assertEqual(self.manager.url, self.manager.base_url + 'foo')
class ResourceTests(unittest.TestCase):
def setUp(self):
<|code_end|>
, predict the next line using imports from the current file:
import unittest
from unittest import mock
from groupy.api import base
and context including class names, function names, and sometimes code from other files:
# Path: groupy/api/base.py
# class Manager:
# class Resource:
# class ManagedResource(Resource):
# def __init__(self, session, path=None):
# def __init__(self, **data):
# def __getattr__(self, attr):
# def __getstate__(self):
# def __setstate__(self, d):
# def __init__(self, manager, **data):
. Output only the next line. | self.data = {'foo': 'bar'} |
Given snippet: <|code_start|> def test_result_is_not_ready(self):
self.assertFalse(self.is_ready)
def test_getting_result_raises_not_ready_exception(self):
with self.assertRaises(ResultsNotReady):
self.request.get()
class ExpiredResultsTests(MembershipRequestTests):
def setUp(self):
super().setUp()
self.m_manager.check.side_effect = ResultsExpired(response=None)
self.is_ready = self.request.is_ready()
def test_result_is_ready(self):
self.assertTrue(self.is_ready)
def test_getting_result_raises_expired_exception(self):
with self.assertRaises(ResultsExpired):
self.request.get()
class FailureResultsTests(MembershipRequestTests):
def setUp(self):
super().setUp()
self.m_manager.check.return_value = self.requests[1:]
self.is_ready = self.request.is_ready()
self.results = self.request.get()
def test_not_all_requests_have_results(self):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase
and context:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
which might include code, classes, or functions. Output only the next line. | self.assertNotEqual(len(self.results.members), len(self.requests)) |
Given the following code snippet before the placeholder: <|code_start|>
def test_result_is_ready(self):
self.assertTrue(self.is_ready)
def test_results_are_members(self):
for member in self.results.members:
with self.subTest(member=member):
self.assertIsInstance(member, memberships.Member)
def test_there_are_no_failures(self):
self.assertEqual(self.results.failures, [])
def test_resulting_members_have_no_guids(self):
for member in self.results.members:
with self.subTest(member=member):
with self.assertRaises(AttributeError):
member.guid
class NotReadyResultsTests(MembershipRequestTests):
def setUp(self):
super().setUp()
self.m_manager.check.side_effect = ResultsNotReady(response=None)
self.is_ready = self.request.is_ready()
def test_result_is_not_ready(self):
self.assertFalse(self.is_ready)
def test_getting_result_raises_not_ready_exception(self):
with self.assertRaises(ResultsNotReady):
<|code_end|>
, predict the next line using imports from the current file:
from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase
and context including class names, function names, and sometimes code from other files:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | self.request.get() |
Predict the next line after this snippet: <|code_start|> def setUp(self):
super().setUp()
self.member.unblock()
def test_uses_user_id(self):
self.assert_kwargs(self._blocks.unblock,
other_user_id=self.data['user_id'])
class RemoveMemberTests(MemberTests):
def setUp(self):
super().setUp()
self.member.remove()
def test_uses_user_id(self):
self.assert_kwargs(self.member._memberships.remove,
membership_id=self.data['id'])
class MembershipRequestTests(TestCase):
def setUp(self):
self.m_manager = mock.Mock()
self.requests = [get_fake_member_data(guid='foo-%s' % n) for n in range(2)]
self.request = memberships.MembershipRequest(self.m_manager,
*self.requests,
group_id='baz',
results_id='bar')
class ReadyResultsTests(MembershipRequestTests):
<|code_end|>
using the current file's imports:
from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase
and any relevant context from other files:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | def setUp(self): |
Next line prediction: <|code_start|> with self.assertRaises(ResultsExpired):
self.memberships.check('bar')
def test_results_available(self):
data = {'members': [{'baz': 'qux'}]}
self.m_session.get.return_value = get_fake_response(data=data)
result = self.memberships.check('bar')
self.assertEqual(result, data['members'])
class RemoveMembershipTests(MembershipsTests):
def test_result_is_True(self):
self.m_session.post.return_value = mock.Mock(ok=True)
self.assertTrue(self.memberships.remove('bar'))
class MemberTests(TestCase):
@mock.patch('groupy.api.memberships.Memberships')
@mock.patch('groupy.api.memberships.user')
def setUp(self, *__):
self.m_manager = mock.Mock()
self.data = get_fake_member_data(group_id='foo_group_id')
self.member = memberships.Member(self.m_manager, **self.data)
self._blocks = self.member._user.blocks
self._memberships = self.member._memberships
class MemberEqualityTests(MemberTests):
def test_same_id(self):
member = memberships.Member(self.m_manager, **self.data)
<|code_end|>
. Use current file imports:
(from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase)
and context including class names, function names, or small code snippets from other files:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
. Output only the next line. | self.assertEqual(self.member, member) |
Predict the next line for this snippet: <|code_start|>class RemoveMembershipTests(MembershipsTests):
def test_result_is_True(self):
self.m_session.post.return_value = mock.Mock(ok=True)
self.assertTrue(self.memberships.remove('bar'))
class MemberTests(TestCase):
@mock.patch('groupy.api.memberships.Memberships')
@mock.patch('groupy.api.memberships.user')
def setUp(self, *__):
self.m_manager = mock.Mock()
self.data = get_fake_member_data(group_id='foo_group_id')
self.member = memberships.Member(self.m_manager, **self.data)
self._blocks = self.member._user.blocks
self._memberships = self.member._memberships
class MemberEqualityTests(MemberTests):
def test_same_id(self):
member = memberships.Member(self.m_manager, **self.data)
self.assertEqual(self.member, member)
def test_different_id(self):
member = memberships.Member(self.m_manager, **self.data)
member.id = 2 * self.member.id
self.assertNotEqual(self.member, member)
class MemberIsBlockedTests(MemberTests):
def setUp(self):
<|code_end|>
with the help of current file imports:
from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase
and context from other files:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
, which may contain function names, class names, or code. Output only the next line. | super().setUp() |
Given snippet: <|code_start|>
class ExpiredResultsTests(MembershipRequestTests):
def setUp(self):
super().setUp()
self.m_manager.check.side_effect = ResultsExpired(response=None)
self.is_ready = self.request.is_ready()
def test_result_is_ready(self):
self.assertTrue(self.is_ready)
def test_getting_result_raises_expired_exception(self):
with self.assertRaises(ResultsExpired):
self.request.get()
class FailureResultsTests(MembershipRequestTests):
def setUp(self):
super().setUp()
self.m_manager.check.return_value = self.requests[1:]
self.is_ready = self.request.is_ready()
self.results = self.request.get()
def test_not_all_requests_have_results(self):
self.assertNotEqual(len(self.results.members), len(self.requests))
def test_there_are_failures(self):
self.assertTrue(self.results.failures)
def test_the_failure_is_the_correct_request(self):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from unittest import mock
from groupy.api import memberships
from groupy.exceptions import ResultsNotReady
from groupy.exceptions import ResultsExpired
from .base import get_fake_response, get_fake_member_data
from .base import TestCase
and context:
# Path: groupy/api/memberships.py
# class Memberships(base.Manager):
# class Member(base.ManagedResource):
# class MembershipRequest(base.ManagedResource):
# def __init__(self, session, group_id):
# def add(self, nickname, email=None, phone_number=None, user_id=None):
# def add_multiple(self, *users):
# def check(self, results_id):
# def update(self, nickname=None, **kwargs):
# def remove(self, membership_id):
# def __init__(self, manager, group_id, **data):
# def __repr__(self):
# def __eq__(self, other):
# def post(self, text=None, attachments=None, source_guid=None):
# def is_blocked(self):
# def block(self):
# def unblock(self):
# def remove(self):
# def add_to_group(self, group_id, nickname=None):
# def __init__(self, manager, *requests, **data):
# def check_if_ready(self):
# def get_failed_requests(self, results):
# def get_new_members(self, results):
# def is_ready(self, check=True):
# def poll(self, timeout=30, interval=2):
# def get(self):
#
# Path: groupy/exceptions.py
# class ResultsNotReady(ResultsError):
# """Exception raised when results are not yet ready.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results are not ready yet'):
# super().__init__(response, message)
#
# Path: groupy/exceptions.py
# class ResultsExpired(ResultsError):
# """Exception raised when the results have expired.
#
# :param response: the response
# :type response: :class:`~requests.Response`
# :param str message: a description of the exception
# """
#
# def __init__(self, response, message='The results have expired'):
# super().__init__(response, message)
#
# Path: tests/api/base.py
# def get_fake_response(code=200, data=None):
# response = mock.Mock()
# response.status_code = code
# response.data = data
# return response
#
# def get_fake_member_data(**kwargs):
# data = {
# 'id': 'foo',
# 'user_id': 'baz',
# 'nickname': 'nick',
# }
# data.update(kwargs)
# return data
#
# Path: tests/api/base.py
# class TestCase(unittest.TestCase):
# def assert_kwargs(self, mock, **kwargs):
# __, m_kwargs = mock.call_args
# for k, v in kwargs.items():
# with self.subTest(key=k, value=v):
# self.assertEqual(m_kwargs.get(k), v)
which might include code, classes, or functions. Output only the next line. | self.assertEqual(self.results.failures, self.requests[:1]) |
Here is a snippet: <|code_start|> def test_path_appending(self):
url = utils.urljoin(self.url, 'bar')
self.assertEqual(url, 'http://example.com/foo/bar')
class TrailingSlashUrlJoinTests(UrlJoinTests):
url = 'http://example.com/foo/'
class ParseShareUrlTests(unittest.TestCase):
url = 'http://example.com/foo/group_id/share_token'
def setUp(self):
self.group_id, self.share_token = utils.parse_share_url(self.url)
def test_group_id_is_correct(self):
self.assertEqual(self.group_id, 'group_id')
def test_share_token_is_correct(self):
self.assertEqual(self.share_token, 'share_token')
class TrailingSlashParseShareUrlTests(ParseShareUrlTests):
url = 'http://example.com/foo/group_id/share_token/'
class FilterTests(unittest.TestCase):
def setUp(self):
self.objects = [
mock.Mock(foo='foo', baz=0),
<|code_end|>
. Write the next line using the current file imports:
import unittest
from unittest import mock
from groupy import utils
from groupy import exceptions
and context from other files:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
#
# Path: groupy/exceptions.py
# class GroupyError(Exception):
# class MissingMembershipError(GroupyError):
# class FindError(GroupyError):
# class NoMatchesError(FindError):
# class MultipleMatchesError(FindError):
# class ApiError(GroupyError):
# class NoResponse(ApiError):
# class BadResponse(ApiError):
# class InvalidJsonError(BadResponse):
# class MissingResponseError(BadResponse):
# class MissingMetaError(BadResponse):
# class ResultsError(ApiError):
# class ResultsNotReady(ResultsError):
# class ResultsExpired(ResultsError):
# def __init__(self, message=None):
# def __init__(self, group_id, user_id, message=None):
# def __init__(self, message, objects, tests, matches=None):
# def __init__(self, objects, tests):
# def __init__(self, objects, tests, matches):
# def __init__(self, request, *args, **kwargs):
# def __init__(self, response, message=None):
# def _extract_message(self, response):
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# def __init__(self, response, message='The response contained no response data'):
# def __init__(self, response, message='The response contained no meta data'):
# def __init__(self, response, message):
# def __init__(self, response, message='The results are not ready yet'):
# def __init__(self, response, message='The results have expired'):
, which may include functions, classes, or code. Output only the next line. | mock.Mock(foo='bar', baz=1), |
Using the snippet: <|code_start|>
class UrlJoinTests(unittest.TestCase):
url = 'http://example.com/foo'
def test_result_is_base_when_no_path(self):
self.assertEqual(utils.urljoin(self.url), self.url)
def test_path_appending(self):
url = utils.urljoin(self.url, 'bar')
self.assertEqual(url, 'http://example.com/foo/bar')
class TrailingSlashUrlJoinTests(UrlJoinTests):
url = 'http://example.com/foo/'
class ParseShareUrlTests(unittest.TestCase):
url = 'http://example.com/foo/group_id/share_token'
def setUp(self):
self.group_id, self.share_token = utils.parse_share_url(self.url)
def test_group_id_is_correct(self):
self.assertEqual(self.group_id, 'group_id')
def test_share_token_is_correct(self):
self.assertEqual(self.share_token, 'share_token')
<|code_end|>
, determine the next line of code. You have imports:
import unittest
from unittest import mock
from groupy import utils
from groupy import exceptions
and context (class names, function names, or code) available:
# Path: groupy/utils.py
# def urljoin(base, path=None):
# def parse_share_url(share_url):
# def get_rfc3339(when):
# def get_datetime(timestamp):
# def __init__(self, key, value):
# def __repr__(self):
# def __call__(self, obj):
# def __init__(self, tests):
# def __call__(self, objects):
# def find(self, objects):
# def passes(self, obj):
# def make_filter(**tests):
# class AttrTest:
# class Filter:
#
# Path: groupy/exceptions.py
# class GroupyError(Exception):
# class MissingMembershipError(GroupyError):
# class FindError(GroupyError):
# class NoMatchesError(FindError):
# class MultipleMatchesError(FindError):
# class ApiError(GroupyError):
# class NoResponse(ApiError):
# class BadResponse(ApiError):
# class InvalidJsonError(BadResponse):
# class MissingResponseError(BadResponse):
# class MissingMetaError(BadResponse):
# class ResultsError(ApiError):
# class ResultsNotReady(ResultsError):
# class ResultsExpired(ResultsError):
# def __init__(self, message=None):
# def __init__(self, group_id, user_id, message=None):
# def __init__(self, message, objects, tests, matches=None):
# def __init__(self, objects, tests):
# def __init__(self, objects, tests, matches):
# def __init__(self, request, *args, **kwargs):
# def __init__(self, response, message=None):
# def _extract_message(self, response):
# def __init__(self, response, message='The JSON was incomplete/invalid'):
# def __init__(self, response, message='The response contained no response data'):
# def __init__(self, response, message='The response contained no meta data'):
# def __init__(self, response, message):
# def __init__(self, response, message='The results are not ready yet'):
# def __init__(self, response, message='The results have expired'):
. Output only the next line. | class TrailingSlashParseShareUrlTests(ParseShareUrlTests): |
Based on the snippet: <|code_start|>
class PagerTests(unittest.TestCase):
def setUp(self):
self.m_manager = mock.Mock()
self.m_endpoint = mock.Mock()
self.m_endpoint.side_effect = ['abc', 'xyz']
self.params = {'x': 42}
self.pager = pagers.Pager(self.m_manager, self.m_endpoint, **self.params)
def test_iterates_over_current_page_items(self):
items = list(self.pager)
self.assertEqual(items, list('abc'))
def test_autopage_iterates_over_items_from_all_pages(self):
self.pager.set_next_page_params = mock.Mock()
items = list(self.pager.autopage())
<|code_end|>
, predict the immediate next line with the help of imports:
import unittest
from unittest import mock
from groupy import pagers
and context (classes, functions, sometimes code) from other files:
# Path: groupy/pagers.py
# class Pager:
# class GroupList(Pager):
# class ChatList(GroupList):
# class MessageList(Pager):
# class GalleryList(MessageList):
# def __init__(self, manager, endpoint, **params):
# def __getitem__(self, index):
# def __iter__(self):
# def set_next_page_params(self):
# def fetch(self):
# def fetch_next(self):
# def autopage(self):
# def set_next_page_params(self):
# def __init__(self, manager, endpoint, **params):
# def detect_mode(cls, **params):
# def set_next_page_params(self):
# def get_last_item_index(self):
# def get_next_page_param(self, item):
# def fetch_next(self):
# def get_next_page_param(self, item):
. Output only the next line. | self.assertEqual(items, list('abcxyz')) |
Given the code snippet: <|code_start|>
class ImagesTests(unittest.TestCase):
def setUp(self):
self.m_session = mock.Mock()
self.images = attachments.Images(self.m_session)
class UploadImageTests(ImagesTests):
def setUp(self):
super().setUp()
self.m_session.post.return_value = mock.Mock(data={'url': 'bar'})
self.result = self.images.upload(io.BytesIO(b'foo'))
def test_result_is_payload(self):
self.assertEqual(self.result, {'url': 'bar'})
class DownloadImageTests(ImagesTests):
def setUp(self):
super().setUp()
self.m_session.get.return_value = mock.Mock(content='bar')
m_image_attachment = mock.Mock(url='foo')
self.result = self.images.download(m_image_attachment)
<|code_end|>
, generate the next line using the imports in this file:
import io
import unittest
from unittest import mock
from groupy.api import attachments
and context (functions, classes, or occasionally code) from other files:
# Path: groupy/api/attachments.py
# class AttachmentMeta(type):
# class Attachment(base.Resource, metaclass=AttachmentMeta):
# class Location(Attachment):
# class Split(Attachment):
# class Emoji(Attachment):
# class Mentions(Attachment):
# class Image(Attachment):
# class LinkedImage(Image):
# class Images(base.Manager):
# def __init__(cls, name, bases, attrs):
# def __init__(self, type, **data):
# def to_json(self):
# def from_data(cls, type, **data):
# def from_bulk_data(cls, attachments):
# def __init__(self, lat, lng, name, foursqure_venue_id=None):
# def __init__(self, token):
# def __init__(self, placeholder, charmap):
# def __init__(self, loci=None, user_ids=None):
# def __init__(self, url, source_url=None, file_id=None):
# def from_file(self, fp):
# def upload(self, fp):
# def download(self, image, url_field='url', suffix=None):
# def download_preview(self, image, url_field='url'):
# def download_large(self, image, url_field='url'):
# def download_avatar(self, image, url_field='url'):
. Output only the next line. | def test_result_is_content(self): |
Predict the next line for this snippet: <|code_start|>#
# Zoe documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 11 15:11:20 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.basename(__file__), "..", "..")))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
<|code_end|>
with the help of current file imports:
import sys
import os
from zoe_lib.version import ZOE_VERSION
and context from other files:
# Path: zoe_lib/version.py
# ZOE_VERSION = '2018.12'
, which may contain function names, class names, or code. Output only the next line. | 'sphinx.ext.coverage', |
Predict the next line for this snippet: <|code_start|> def can_handle(opts, spec: str) -> bool:
"""The gen spec follows gen:[key=value[,key=value]] format."""
return spec.startswith("gen:")
@staticmethod
def check(opts, spec: str) -> Tuple[couchbaseConstants.PUMP_ERROR, Optional[Dict[str, Any]]]:
rv, cfg = GenSource.parse_spec(opts, spec)
if rv != 0:
return rv, None
return 0, {'cfg': cfg,
'spec': spec,
'buckets': [{'name': 'default',
'nodes': [{'hostname': f'N/A-{i!s}'}
for i in range(opts.threads)]}]}
@staticmethod
def parse_spec(opts, spec: str) -> Tuple[couchbaseConstants.PUMP_ERROR, Optional[Dict[str, Any]]]:
"""Parse the comma-separated key=value configuration from the gen spec.
Names and semantics were inspired from subset of mcsoda parameters."""
cfg = {'cur-ops': 0,
'cur-gets': 0,
'cur-sets': 0,
'cur-items': 0,
'exit-after-creates': 0,
'max-items': 10000,
'min-value-size': 10,
'prefix': "",
'ratio-sets': 0.05,
'json': 0,
'low-compression': False,
<|code_end|>
with the help of current file imports:
import json
import random
import string
import struct
import couchbaseConstants
import pump
from typing import Any, Dict, Optional, Tuple
from cb_bin_client import encode_collection_id
and context from other files:
# Path: cb_bin_client.py
# def encode_collection_id(cid: int) -> bytes:
# output = array.array('B', [0])
# while cid > 0:
# byte = cid & 0xFF
# cid >>= 7
# # CID has more bits
# if cid > 0:
# # Set the 'continue' bit of this byte
# byte |= 0x80
# output[-1] = byte
# output.append(0)
# else:
# output[-1] = byte
# return output.tobytes()
, which may contain function names, class names, or code. Output only the next line. | 'xattr': False} |
Predict the next line for this snippet: <|code_start|> if not buckets:
return f'error: no bucket subdirectories at: {d}', None
return 0, {'spec': spec, 'buckets': buckets}
@staticmethod
def vbucket_states(opts, spec, bucket_dir) -> Tuple[couchbaseConstants.PUMP_ERROR, Optional[Dict[str, Any]]]:
"""Reads all the latest couchstore files in a directory, and returns
map of state string (e.g., 'active') to map of vbucket_id to doc."""
vbucket_states: Dict[str, Any] = defaultdict(dict)
for f in latest_couch_files(bucket_dir):
vbucket_id = int(re.match(SFD_RE, os.path.basename(f)).group(1)) # type: ignore
try:
store = couchstore.CouchStore(f, 'r')
try:
doc_str = store.localDocs['_local/vbstate']
if doc_str:
doc = json.loads(doc_str)
state = doc.get('state', None)
if state:
vbucket_states[state][vbucket_id] = doc
else:
return f'error: missing vbucket_state from: {f}', None
except Exception as e:
return f'error: could not read _local/vbstate from: {f}; exception: {e}', None
store.close()
except Exception as e:
return f'error: could not read couchstore file: {f}; exception: {e}', None
<|code_end|>
with the help of current file imports:
import glob
import json
import logging
import os
import queue
import re
import struct
import threading
import couchstore # pylint: disable=import-error
import couchbaseConstants
import pump
from collections import defaultdict
from typing import Any, Dict, List, Optional, Tuple
from cb_bin_client import decode_collection_id, encode_collection_id
and context from other files:
# Path: cb_bin_client.py
# def decode_collection_id(key: bytes) -> Tuple[int, bytes]:
# # A leb128 varint encodes the CID
# data = array.array('B')
# data.frombytes(key)
# cid = data[0] & 0x7f
# end = 1
# if (data[0] & 0x80) == 0x80:
# shift = 7
# for end in range(1, len(data)):
# cid |= ((data[end] & 0x7f) << shift)
# if (data[end] & 0x80) == 0:
# break
# shift = shift + 7
#
# end = end + 1
# if end == len(data):
# # We should of stopped for a stop byte, not the end of the buffer
# raise ValueError("encoded key did not contain a stop byte")
# return cid, key[end:]
#
# def encode_collection_id(cid: int) -> bytes:
# output = array.array('B', [0])
# while cid > 0:
# byte = cid & 0xFF
# cid >>= 7
# # CID has more bits
# if cid > 0:
# # Set the 'continue' bit of this byte
# byte |= 0x80
# output[-1] = byte
# output.append(0)
# else:
# output[-1] = byte
# return output.tobytes()
, which may contain function names, class names, or code. Output only the next line. | if vbucket_states: |
Using the snippet: <|code_start|> vbucket_id = None
# Level of indirection since we can't use python 3 nonlocal statement.
abatch: List[pump.Batch] = [pump.Batch(self)]
def change_callback(doc_info):
if doc_info:
# Handle the new key name spacing for collections and co
cid, key = decode_collection_id(doc_info.id.encode())
# Only support keys in the _default collection
if cid != 0:
logging.debug('Skipping as not default collection')
return
if self.skip(key, vbucket_id):
return
if doc_info.deleted:
cmd = couchbaseConstants.CMD_DCP_DELETE
else:
cmd = couchbaseConstants.CMD_DCP_MUTATION
# Deletes/tombstones may contain a body if they contain xattrs; a 'KeyError' from 'couchstore' indicates
# when this is not the case.
try:
val = doc_info.getContents(options=couchstore.CouchStore.DECOMPRESS)
except KeyError:
val = b''
try:
<|code_end|>
, determine the next line of code. You have imports:
import glob
import json
import logging
import os
import queue
import re
import struct
import threading
import couchstore # pylint: disable=import-error
import couchbaseConstants
import pump
from collections import defaultdict
from typing import Any, Dict, List, Optional, Tuple
from cb_bin_client import decode_collection_id, encode_collection_id
and context (class names, function names, or code) available:
# Path: cb_bin_client.py
# def decode_collection_id(key: bytes) -> Tuple[int, bytes]:
# # A leb128 varint encodes the CID
# data = array.array('B')
# data.frombytes(key)
# cid = data[0] & 0x7f
# end = 1
# if (data[0] & 0x80) == 0x80:
# shift = 7
# for end in range(1, len(data)):
# cid |= ((data[end] & 0x7f) << shift)
# if (data[end] & 0x80) == 0:
# break
# shift = shift + 7
#
# end = end + 1
# if end == len(data):
# # We should of stopped for a stop byte, not the end of the buffer
# raise ValueError("encoded key did not contain a stop byte")
# return cid, key[end:]
#
# def encode_collection_id(cid: int) -> bytes:
# output = array.array('B', [0])
# while cid > 0:
# byte = cid & 0xFF
# cid >>= 7
# # CID has more bits
# if cid > 0:
# # Set the 'continue' bit of this byte
# byte |= 0x80
# output[-1] = byte
# output.append(0)
# else:
# output[-1] = byte
# return output.tobytes()
. Output only the next line. | rev_meta_bytes = doc_info.revMeta.get_bytes() |
Here is a snippet: <|code_start|> else:
return f'error: {str_msg}', None, None
elif r_status == couchbaseConstants.ERR_UNKNOWN_COMMAND:
if self.op_map == OP_MAP:
if not retry:
return f'error: unknown command: {r_cmd}', None, None
else:
if not retry:
logging.warning("destination does not take XXX-WITH-META"
" commands; will use META-less commands")
self.op_map = OP_MAP
retry = True
elif r_status == couchbaseConstants.ERR_ACCESS:
return json.loads(r_val)["error"]["context"], None, None
elif r_status == couchbaseConstants.ERR_UNKNOWN_COLLECTION:
return json.loads(r_val)["error"]["context"], None, None
else:
return "error: MCSink MC error: " + str(r_status), None, None
except Exception as e:
logging.error(f'MCSink exception: {e}')
return f'error: MCSink exception: {e!s}', None, None
return 0, retry, refresh
def translate_cmd(self, cmd: int, op: str, meta: bytes) -> Tuple[couchbaseConstants.PUMP_ERROR, Optional[int]]:
if len(meta) == 0:
# The source gave no meta, so use regular commands.
self.op_map = OP_MAP
if cmd in [couchbaseConstants.CMD_TAP_MUTATION, couchbaseConstants.CMD_DCP_MUTATION]:
<|code_end|>
. Write the next line using the current file imports:
import json
import logging
import re
import socket
import struct
import sys
import time
import snappy # pylint: disable=import-error
import cb_bin_client
import couchbaseConstants
import pump
import ctypes
import ctypes
from typing import Any, Dict, List, Optional, Tuple
from cb_util import tag_user_data
and context from other files:
# Path: cb_util.py
# def tag_user_data(value):
# '''Adds tags to user data so that it can be redacted later'''
# return f'<ud>{str(value)}</ud>'
, which may include functions, classes, or code. Output only the next line. | m = self.op_map.get(op, None) |
Given the following code snippet before the placeholder: <|code_start|>
# Send all of the keys in quiet
for opaque, kv in opaqued.items():
self._send_cmd(couchbaseConstants.CMD_SETQ, kv[0], kv[1], opaque, extra)
self._send_cmd(couchbaseConstants.CMD_NOOP, b'', b'', terminal)
# Handle the response
failed = []
done = False
while not done:
try:
opaque, cas, data = self._handle_single_response(None) # type: ignore
done = opaque == terminal
except MemcachedError as e:
failed.append(e)
return failed
def del_multi(self, items):
"""Multi-delete (using delq).
Give me a collection of keys."""
opaqued = dict(enumerate(items))
terminal = len(opaqued) + 10
extra = b''
# Send all of the keys in quiet
for opaque, k in opaqued.items():
<|code_end|>
, predict the next line using imports from the current file:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context including class names, function names, and sometimes code from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
. Output only the next line. | self._send_cmd(couchbaseConstants.CMD_DELETEQ, k, b'', opaque, extra) |
Here is a snippet: <|code_start|>
return failed
def del_multi(self, items):
"""Multi-delete (using delq).
Give me a collection of keys."""
opaqued = dict(enumerate(items))
terminal = len(opaqued) + 10
extra = b''
# Send all of the keys in quiet
for opaque, k in opaqued.items():
self._send_cmd(couchbaseConstants.CMD_DELETEQ, k, b'', opaque, extra)
self._send_cmd(couchbaseConstants.CMD_NOOP, b'', b'', terminal)
# Handle the response
failed = []
done = False
while not done:
try:
opaque, cas, data = self._handle_single_response(None)
done = opaque == terminal
except MemcachedError as e:
failed.append(e)
return failed
<|code_end|>
. Write the next line using the current file imports:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
, which may include functions, classes, or code. Output only the next line. | def stats(self, sub: bytes = b''): |
Next line prediction: <|code_start|> self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_GET_VBUCKET_STATE, b'', b'')
def delete_vbucket(self, vbucket: int):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_DELETE_VBUCKET, b'', b'')
def evict_key(self, key: bytes):
return self._do_cmd(couchbaseConstants.CMD_EVICT_KEY, key, b'')
def get_multi(self, keys: Iterable[bytes]):
"""Get values for any available keys in the given iterable.
Returns a dict of matched keys to their values."""
opaqued = dict(enumerate(keys))
terminal = len(opaqued) + 10
# Send all of the keys in quiet
for k, v in opaqued.items():
self._send_cmd(couchbaseConstants.CMD_GETQ, v, b'', k)
self._send_cmd(couchbaseConstants.CMD_NOOP, b'', b'', terminal)
# Handle the response
rv = {}
done = False
while not done:
opaque, cas, data = self._handle_single_response(None) # type: ignore
if opaque != terminal:
rv[opaqued[opaque]] = self.__parse_get((opaque, cas, data)) # type: ignore
<|code_end|>
. Use current file imports:
(import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error)
and context including class names, function names, or small code snippets from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
. Output only the next line. | else: |
Given the following code snippet before the placeholder: <|code_start|> return self._do_cmd(couchbaseConstants.CMD_SET_PARAM, key, val, type_bytes)
def set_vbucket_state(self, vbucket: int, state_name: str):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
state = struct.pack(couchbaseConstants.VB_SET_PKT_FMT,
couchbaseConstants.VB_STATE_NAMES[state_name])
return self._do_cmd(couchbaseConstants.CMD_SET_VBUCKET_STATE, b'', b'', state)
def get_vbucket_state(self, vbucket: int):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_GET_VBUCKET_STATE, b'', b'')
def delete_vbucket(self, vbucket: int):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_DELETE_VBUCKET, b'', b'')
def evict_key(self, key: bytes):
return self._do_cmd(couchbaseConstants.CMD_EVICT_KEY, key, b'')
def get_multi(self, keys: Iterable[bytes]):
"""Get values for any available keys in the given iterable.
Returns a dict of matched keys to their values."""
opaqued = dict(enumerate(keys))
terminal = len(opaqued) + 10
# Send all of the keys in quiet
for k, v in opaqued.items():
<|code_end|>
, predict the next line using imports from the current file:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context including class names, function names, and sometimes code from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
. Output only the next line. | self._send_cmd(couchbaseConstants.CMD_GETQ, v, b'', k) |
Given snippet: <|code_start|> """Get values for any available keys in the given iterable.
Returns a dict of matched keys to their values."""
opaqued = dict(enumerate(keys))
terminal = len(opaqued) + 10
# Send all of the keys in quiet
for k, v in opaqued.items():
self._send_cmd(couchbaseConstants.CMD_GETQ, v, b'', k)
self._send_cmd(couchbaseConstants.CMD_NOOP, b'', b'', terminal)
# Handle the response
rv = {}
done = False
while not done:
opaque, cas, data = self._handle_single_response(None) # type: ignore
if opaque != terminal:
rv[opaqued[opaque]] = self.__parse_get((opaque, cas, data)) # type: ignore
else:
done = True
return rv
def set_multi(self, exp: int, flags: int, items):
"""Multi-set (using setq).
Give me (key, value) pairs."""
# If this is a dict, convert it to a pair generator
if hasattr(items, 'iteritems'):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
which might include code, classes, or functions. Output only the next line. | items = items.items() |
Based on the snippet: <|code_start|> opaque, cas, data = self._handle_single_response(None) # type: ignore
if opaque != terminal:
rv[opaqued[opaque]] = self.__parse_get((opaque, cas, data)) # type: ignore
else:
done = True
return rv
def set_multi(self, exp: int, flags: int, items):
"""Multi-set (using setq).
Give me (key, value) pairs."""
# If this is a dict, convert it to a pair generator
if hasattr(items, 'iteritems'):
items = items.items()
opaqued = dict(enumerate(items))
terminal = len(opaqued) + 10
extra = struct.pack(SET_PKT_FMT, flags, exp)
# Send all of the keys in quiet
for opaque, kv in opaqued.items():
self._send_cmd(couchbaseConstants.CMD_SETQ, kv[0], kv[1], opaque, extra)
self._send_cmd(couchbaseConstants.CMD_NOOP, b'', b'', terminal)
# Handle the response
failed = []
done = False
<|code_end|>
, predict the immediate next line with the help of imports:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context (classes, functions, sometimes code) from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
. Output only the next line. | while not done: |
Here is a snippet: <|code_start|> return self._do_cmd(couchbaseConstants.CMD_SET_PARAM, key, val, type_bytes)
def set_vbucket_state(self, vbucket: int, state_name: str):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
state = struct.pack(couchbaseConstants.VB_SET_PKT_FMT,
couchbaseConstants.VB_STATE_NAMES[state_name])
return self._do_cmd(couchbaseConstants.CMD_SET_VBUCKET_STATE, b'', b'', state)
def get_vbucket_state(self, vbucket: int):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_GET_VBUCKET_STATE, b'', b'')
def delete_vbucket(self, vbucket: int):
assert isinstance(vbucket, int)
self.vbucket_id = vbucket
return self._do_cmd(couchbaseConstants.CMD_DELETE_VBUCKET, b'', b'')
def evict_key(self, key: bytes):
return self._do_cmd(couchbaseConstants.CMD_EVICT_KEY, key, b'')
def get_multi(self, keys: Iterable[bytes]):
"""Get values for any available keys in the given iterable.
Returns a dict of matched keys to their values."""
opaqued = dict(enumerate(keys))
terminal = len(opaqued) + 10
# Send all of the keys in quiet
for k, v in opaqued.items():
<|code_end|>
. Write the next line using the current file imports:
import array
import random
import socket
import ssl
import struct
import couchbaseConstants
from typing import Iterable, List, Optional, Tuple, Union
from couchbaseConstants import (AUDIT_PKT_FMT, INCRDECR_RES_FMT, MIN_RECV_PACKET, REQ_MAGIC_BYTE, REQ_PKT_FMT,
RES_MAGIC_BYTE, RES_PKT_FMT, SET_PKT_FMT)
from cb_version import VERSION # pylint: disable=import-error
and context from other files:
# Path: couchbaseConstants.py
# AUDIT_PKT_FMT = ">I"
#
# INCRDECR_RES_FMT = ">Q"
#
# MIN_RECV_PACKET = struct.calcsize(REQ_PKT_FMT)
#
# REQ_MAGIC_BYTE = 0x80
#
# REQ_PKT_FMT = ">BBHBBHIIQ"
#
# RES_MAGIC_BYTE = 0x81
#
# RES_PKT_FMT = ">BBHBBHIIQ"
#
# SET_PKT_FMT = ">II"
, which may include functions, classes, or code. Output only the next line. | self._send_cmd(couchbaseConstants.CMD_GETQ, v, b'', k) |
Given the following code snippet before the placeholder: <|code_start|>
class Assinatura(_Assinatura):
def assina_xml(self, xml_element):
cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
for element in xml_element.iter("*"):
if element.text is not None and not element.text.strip():
element.text = None
signer = XMLSigner(
method=methods.enveloped,
signature_algorithm=u"rsa-sha1",
digest_algorithm=u"sha1",
c14n_algorithm=u"http://www.w3.org/TR/2001/REC-xml-c14n-20010315",
)
ns = {}
ns[None] = signer.namespaces["ds"]
signer.namespaces = ns
element_signed = xml_element.find(".//{http://nfse.goiania.go.gov.br/xsd/nfse_gyn_v02.xsd}Rps")
signed_root = signer.sign(
xml_element, key=key.encode(), cert=cert.encode()
)
<|code_end|>
, predict the next line using imports from the current file:
from lxml import etree
from pytrustnfe.certificado import extract_cert_and_key_from_pfx
from signxml import XMLSigner, methods
from pytrustnfe.nfe.assinatura import Assinatura as _Assinatura
and context including class names, function names, and sometimes code from other files:
# Path: pytrustnfe/certificado.py
# def extract_cert_and_key_from_pfx(pfx, password):
# pfx = crypto.load_pkcs12(pfx, password)
# # PEM formatted private key
# key = crypto.dump_privatekey(crypto.FILETYPE_PEM, pfx.get_privatekey())
# # PEM formatted certificate
# cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pfx.get_certificate())
# return cert.decode(), key.decode()
#
# Path: pytrustnfe/nfe/assinatura.py
# class Assinatura(object):
# def __init__(self, arquivo, senha):
# self.arquivo = arquivo
# self.senha = senha
#
# def assina_xml(self, xml_element, reference, getchildren=False):
# cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
#
# for element in xml_element.iter("*"):
# if element.text is not None and not element.text.strip():
# element.text = None
#
# signer = XMLSigner(
# method=signxml.methods.enveloped,
# signature_algorithm="rsa-sha1",
# digest_algorithm="sha1",
# c14n_algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315",
# )
#
# ns = {}
# ns[None] = signer.namespaces["ds"]
# signer.namespaces = ns
#
# ref_uri = ("#%s" % reference) if reference else None
# signed_root = signer.sign(
# xml_element, key=key.encode(), cert=cert.encode(), reference_uri=ref_uri
# )
# if reference:
# element_signed = signed_root.find(".//*[@Id='%s']" % reference)
# signature = signed_root.find(
# ".//{http://www.w3.org/2000/09/xmldsig#}Signature"
# )
#
# if getchildren and element_signed is not None and signature is not None:
# child = element_signed.getchildren()
# child.append(signature)
# elif element_signed is not None and signature is not None:
# parent = element_signed.getparent()
# parent.append(signature)
# return etree.tostring(signed_root, encoding=str)
. Output only the next line. | signature = signed_root.find( |
Predict the next line for this snippet: <|code_start|>
class Assinatura(_Assinatura):
def assina_xml(self, xml_element):
cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
for element in xml_element.iter("*"):
if element.text is not None and not element.text.strip():
element.text = None
signer = XMLSigner(
method=methods.enveloped,
signature_algorithm=u"rsa-sha1",
digest_algorithm=u"sha1",
c14n_algorithm=u"http://www.w3.org/TR/2001/REC-xml-c14n-20010315",
<|code_end|>
with the help of current file imports:
from lxml import etree
from pytrustnfe.certificado import extract_cert_and_key_from_pfx
from signxml import XMLSigner, methods
from pytrustnfe.nfe.assinatura import Assinatura as _Assinatura
and context from other files:
# Path: pytrustnfe/certificado.py
# def extract_cert_and_key_from_pfx(pfx, password):
# pfx = crypto.load_pkcs12(pfx, password)
# # PEM formatted private key
# key = crypto.dump_privatekey(crypto.FILETYPE_PEM, pfx.get_privatekey())
# # PEM formatted certificate
# cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pfx.get_certificate())
# return cert.decode(), key.decode()
#
# Path: pytrustnfe/nfe/assinatura.py
# class Assinatura(object):
# def __init__(self, arquivo, senha):
# self.arquivo = arquivo
# self.senha = senha
#
# def assina_xml(self, xml_element, reference, getchildren=False):
# cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
#
# for element in xml_element.iter("*"):
# if element.text is not None and not element.text.strip():
# element.text = None
#
# signer = XMLSigner(
# method=signxml.methods.enveloped,
# signature_algorithm="rsa-sha1",
# digest_algorithm="sha1",
# c14n_algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315",
# )
#
# ns = {}
# ns[None] = signer.namespaces["ds"]
# signer.namespaces = ns
#
# ref_uri = ("#%s" % reference) if reference else None
# signed_root = signer.sign(
# xml_element, key=key.encode(), cert=cert.encode(), reference_uri=ref_uri
# )
# if reference:
# element_signed = signed_root.find(".//*[@Id='%s']" % reference)
# signature = signed_root.find(
# ".//{http://www.w3.org/2000/09/xmldsig#}Signature"
# )
#
# if getchildren and element_signed is not None and signature is not None:
# child = element_signed.getchildren()
# child.append(signature)
# elif element_signed is not None and signature is not None:
# parent = element_signed.getparent()
# parent.append(signature)
# return etree.tostring(signed_root, encoding=str)
, which may contain function names, class names, or code. Output only the next line. | ) |
Next line prediction: <|code_start|># -*- coding: utf-8 -*-
# © 2016 Danimar Ribeiro, Trustcode
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
class Assinatura(object):
def __init__(self, arquivo, senha):
self.arquivo = arquivo
self.senha = senha
def assina_xml(self, xml_element, reference, getchildren=False):
cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
for element in xml_element.iter("*"):
if element.text is not None and not element.text.strip():
element.text = None
signer = XMLSigner(
method=signxml.methods.enveloped,
signature_algorithm="rsa-sha1",
<|code_end|>
. Use current file imports:
(import signxml
from lxml import etree
from pytrustnfe.certificado import extract_cert_and_key_from_pfx
from signxml import XMLSigner)
and context including class names, function names, or small code snippets from other files:
# Path: pytrustnfe/certificado.py
# def extract_cert_and_key_from_pfx(pfx, password):
# pfx = crypto.load_pkcs12(pfx, password)
# # PEM formatted private key
# key = crypto.dump_privatekey(crypto.FILETYPE_PEM, pfx.get_privatekey())
# # PEM formatted certificate
# cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pfx.get_certificate())
# return cert.decode(), key.decode()
. Output only the next line. | digest_algorithm="sha1", |
Given snippet: <|code_start|># -*- coding: utf-8 -*-
# © 2016 Danimar Ribeiro, Trustcode
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
class Assinatura(object):
def __init__(self, arquivo, senha):
self.arquivo = arquivo
self.senha = senha
def assina_xml(self, xml_element, reference):
cert, key = extract_cert_and_key_from_pfx(self.arquivo, self.senha)
for element in xml_element.iter("*"):
if element.text is not None and not element.text.strip():
element.text = None
signer = XMLSigner(
method=signxml.methods.enveloped,
signature_algorithm=u"rsa-sha1",
digest_algorithm=u"sha1",
c14n_algorithm=u"http://www.w3.org/TR/2001/REC-xml-c14n-20010315",
)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import signxml
from lxml import etree
from pytrustnfe.certificado import extract_cert_and_key_from_pfx
from signxml import XMLSigner
and context:
# Path: pytrustnfe/certificado.py
# def extract_cert_and_key_from_pfx(pfx, password):
# pfx = crypto.load_pkcs12(pfx, password)
# # PEM formatted private key
# key = crypto.dump_privatekey(crypto.FILETYPE_PEM, pfx.get_privatekey())
# # PEM formatted certificate
# cert = crypto.dump_certificate(crypto.FILETYPE_PEM, pfx.get_certificate())
# return cert.decode(), key.decode()
which might include code, classes, or functions. Output only the next line. | ns = {} |
Here is a snippet: <|code_start|>__author__ = 'JunSong<songjun54cm@gmail.com>'
# Date: 2018/12/13
class TFDatasetDataProvider(BasicDataProvider):
def __init__(self):
super(TFDatasetDataProvider, self).__init__()
self.train_data_iter = None
self.valid_data_iter = None
self.test_data_iter = None
@abc.abstractmethod
def build(self, config):
<|code_end|>
. Write the next line using the current file imports:
import abc
from data_provider.BasicDataProvider import BasicDataProvider
and context from other files:
# Path: data_provider/BasicDataProvider.py
# class BasicDataProvider(object):
# def __init__(self):
# pass
#
# @abc.abstractmethod
# def load_raw_data_samples(self, config):
# """
# load raw data samples and form a data sample list
# :param config: configures
# :return: list of data samples
# """
# raise NotImplementedError
#
# @abc.abstractmethod
# def build(self, config):
# """
# build data provider
# :param config: configurations
# :return: None
# """
# raise NotImplementedError
#
# def summarize(self):
# """
# logging data provider basic information
# :return:
# """
# logging.info('data provider fields: %s' % str(list(self.__dict__.keys())))
#
# def create(self, config):
# """
# load data provider from pkl file or build it if pkl file not exist.
# :param config: configurations
# :return: None
# """
# import os
# from ml_idiot.data_provider import get_dp_file_path
# if ('dp_file' in config) and config['dp_file'] is not None :
# dp_file_path = config['dp_file']
# else:
# dp_file_path = get_dp_file_path(config)
# if os.path.exists(dp_file_path):
# logging.info('loaded data provider from %s.' % dp_file_path)
# self.load(dp_file_path)
# else:
# self.build(config)
# self.save(dp_file_path)
# logging.info('build data provider and save into %s' % dp_file_path)
#
# self.summarize()
#
# @counting_time
# def save(self, file_path, verbose=False):
# """
# save data provider to pkl file
# :param file_path: pkl file path
# :param verbose: logging information or not
# :return: None
# """
# stime = time.time()
# if verbose:
# logging.info('trying to save provider into %s' % file_path),
# with open(file_path, 'wb') as f:
# pickle.dump(self.__dict__, f)
# # print(list(self.__dict__.keys()))
# if verbose:
# logging.info('finish in %.2f seconds.' % (time.time()-stime))
#
# @counting_time
# def load(self, file_path, mode='full', verbose=False):
# """
# load data provider from pkl file
# :param file_path: data provider pkl file path
# :param mode: full: fill in all the field;
# restrict: only fill in the field initialised.
# :param verbose: logging information or not
# :return: None
# """
# if verbose:
# start = time.time()
# logging.info('loading data provider...'),
# with open(file_path, 'rb') as f:
# d = pickle.load(f)
# # self.splits = d['splits']
# if mode=='restrict':
# for key in self.__dict__.keys():
# self.__dict__[key] = d[key]
# elif mode=='full':
# for key in d.keys():
# self.__dict__[key] = d[key]
# else:
# raise BaseException('%s mode not recognised.' % mode)
# self.prepare_data()
# if verbose:
# logging.info('finish in %.2f seconds.' % (time.time()-start))
#
#
#
# def prepare_data(self):
# # prepare data after load from file
# pass
#
# def form_batch_data(self, samples, options=None):
# return samples
, which may include functions, classes, or code. Output only the next line. | raise NotImplementError |
Predict the next line for this snippet: <|code_start|>#
# Hornet - SSH Honeypot
#
# Copyright (C) 2015 Aniket Panse <aniketpanse@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
gevent.monkey.patch_all()
class HornetTests(unittest.TestCase):
def setUp(self):
self.working_dir = tempfile.mkdtemp()
test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
shutil.copyfile(test_config, os.path.join(self.working_dir, 'config.json'))
<|code_end|>
with the help of current file imports:
import gevent.monkey
import paramiko
import os
import shutil
import unittest
import tempfile
import hornet
from hornet.main import Hornet
from hornet.common.helpers import get_random_item
and context from other files:
# Path: hornet/main.py
# class Hornet(object):
#
# def __init__(self, working_directory, vhost_create_fs=False):
# self.server = None
# self.handler = None
# self.server_greenlet = None
# self.session_q = gevent.queue.Queue()
# self.consumer = SessionConsumer(self.session_q)
# self.consumer_greenlet = None
# self.working_directory = working_directory
# self.config = self._load_config()
# self._vhost_create_fs = vhost_create_fs
# try:
# self.db_handler = DatabaseHandler(self.config)
# except Exception:
# logger.exception('Could not initialize database: %s', self.config.database)
#
# # Create virtual hosts
# self.vhosts = self._create_vhosts()
#
# def _load_config(self):
# config_path = os.path.join(self.working_directory, 'config.json')
# if not os.path.isfile(config_path):
# source = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# destination = config_path
# logger.info('Config file {} not found, copying default'.format(destination))
# shutil.copyfile(src=source, dst=destination)
# with open(config_path, 'r') as config_fp:
# config_params = json.load(config_fp)
# return Config(config_params)
#
# def _create_vhosts(self):
#
# # Create a directory for virtual filesystems, if it doesn't exist
# vhosts_path = os.path.join(self.working_directory, 'vhosts')
# if not os.path.isdir(vhosts_path):
# logger.info('Creating directory {} for virtual host filesystems'.format(vhosts_path))
# os.mkdir(vhosts_path)
#
# hosts = {}
# for host_params in self.config.vhost_params:
# h = VirtualHost(host_params, self.config.network, vhosts_path, create_fs=self._vhost_create_fs)
# hosts[h.hostname] = h
# return hosts
#
# def start(self):
# self.handler = SSHWrapper(self.vhosts, self.session_q, self.config, self.working_directory, self.db_handler)
# self.server = gevent.server.StreamServer((self.config.host, self.config.port),
# handle=self.handler.handle_session)
# self.server_greenlet = gevent.spawn(self.server.serve_forever)
# while self.server.server_port == 0:
# gevent.sleep(0) # Bad way of waiting, but can't think of anything right now.
# logger.info('SSH server listening on {}:{}'.format(self.server.server_host, self.server.server_port))
#
# self.consumer_greenlet = self.consumer.start()
# return [self.server_greenlet, self.consumer_greenlet]
#
# def stop(self):
# logging.debug('Stopping the server')
# self.server.stop()
# self.consumer.stop()
#
# Path: hornet/common/helpers.py
# def get_random_item(collection):
# if isinstance(collection, dict):
# all_keys = list(collection.keys())
# r = random.choice(all_keys)
# return collection[r]
# elif isinstance(collection, list):
# return random.choice(collection)
, which may contain function names, class names, or code. Output only the next line. | def tearDown(self): |
Predict the next line for this snippet: <|code_start|> gevent.sleep(0)
port = honeypot.server.server_port
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# If we log in properly, this should raise no errors
client.connect('127.0.0.1', port=port, username='testuser', password='testpassword')
channel = client.invoke_shell()
while not channel.recv_ready():
gevent.sleep(0) # :-(
welcome = ''
while channel.recv_ready():
welcome += channel.recv(1)
lines = welcome.split('\r\n')
prompt = lines[-1]
self.assertTrue(prompt.endswith('$ '))
# Now send the echo command
channel.send('echo $BROWSER\r\n')
while not channel.recv_ready():
gevent.sleep(0) # :-(
output = ''
while not output.endswith('$ '):
output += channel.recv(1)
lines = output.split('\r\n')
command = lines[0]
<|code_end|>
with the help of current file imports:
import gevent.monkey
import paramiko
from hornet.main import Hornet
from hornet.tests.commands.base import BaseTestClass
and context from other files:
# Path: hornet/main.py
# class Hornet(object):
#
# def __init__(self, working_directory, vhost_create_fs=False):
# self.server = None
# self.handler = None
# self.server_greenlet = None
# self.session_q = gevent.queue.Queue()
# self.consumer = SessionConsumer(self.session_q)
# self.consumer_greenlet = None
# self.working_directory = working_directory
# self.config = self._load_config()
# self._vhost_create_fs = vhost_create_fs
# try:
# self.db_handler = DatabaseHandler(self.config)
# except Exception:
# logger.exception('Could not initialize database: %s', self.config.database)
#
# # Create virtual hosts
# self.vhosts = self._create_vhosts()
#
# def _load_config(self):
# config_path = os.path.join(self.working_directory, 'config.json')
# if not os.path.isfile(config_path):
# source = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# destination = config_path
# logger.info('Config file {} not found, copying default'.format(destination))
# shutil.copyfile(src=source, dst=destination)
# with open(config_path, 'r') as config_fp:
# config_params = json.load(config_fp)
# return Config(config_params)
#
# def _create_vhosts(self):
#
# # Create a directory for virtual filesystems, if it doesn't exist
# vhosts_path = os.path.join(self.working_directory, 'vhosts')
# if not os.path.isdir(vhosts_path):
# logger.info('Creating directory {} for virtual host filesystems'.format(vhosts_path))
# os.mkdir(vhosts_path)
#
# hosts = {}
# for host_params in self.config.vhost_params:
# h = VirtualHost(host_params, self.config.network, vhosts_path, create_fs=self._vhost_create_fs)
# hosts[h.hostname] = h
# return hosts
#
# def start(self):
# self.handler = SSHWrapper(self.vhosts, self.session_q, self.config, self.working_directory, self.db_handler)
# self.server = gevent.server.StreamServer((self.config.host, self.config.port),
# handle=self.handler.handle_session)
# self.server_greenlet = gevent.spawn(self.server.serve_forever)
# while self.server.server_port == 0:
# gevent.sleep(0) # Bad way of waiting, but can't think of anything right now.
# logger.info('SSH server listening on {}:{}'.format(self.server.server_host, self.server.server_port))
#
# self.consumer_greenlet = self.consumer.start()
# return [self.server_greenlet, self.consumer_greenlet]
#
# def stop(self):
# logging.debug('Stopping the server')
# self.server.stop()
# self.consumer.stop()
#
# Path: hornet/tests/commands/base.py
# class BaseTestClass(unittest.TestCase):
#
# def setUp(self):
# self.working_dir = tempfile.mkdtemp()
# test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# shutil.copyfile(test_config, os.path.join(self.working_dir, 'config.json'))
#
# def tearDown(self):
# shutil.rmtree(self.working_dir)
#
# def create_filesystem(self, honeypot):
# default_host = honeypot.vhosts[honeypot.config.default_hostname]
# default_host.filesystem.makedir('/etc', recreate=True)
# default_host.filesystem.makedir('/var', recreate=True)
# default_host.filesystem.makedir('/bin', recreate=True)
# default_host.filesystem.makedir('/.hidden', recreate=True)
# default_host.filesystem.makedir('/etc/init.d', recreate=True)
# default_host.filesystem.create('/etc/passwd')
# default_host.filesystem.create('/etc/.config')
# default_host.filesystem.create('/etc/sysctl.conf')
# default_host.filesystem.create('/.hidden/.rcconf')
# default_host.filesystem.create('/initrd.img')
, which may contain function names, class names, or code. Output only the next line. | command_output = '\r\n'.join(lines[1:-1]) |
Next line prediction: <|code_start|> gevent.sleep(0) # :-(
output = ''
while not output.endswith('$ '):
output += channel.recv(1)
lines = output.split('\r\n')
command = lines[0]
command_output = '\r\n'.join(lines[1:-1])
next_prompt = lines[-1]
self.assertEquals('echo', command)
self.assertEquals('', command_output)
self.assertTrue(next_prompt.endswith('$ '))
honeypot.stop()
def test_echo_env_var(self):
""" Tests if echo command works when environment variables as specified in the
config are specified """
honeypot = Hornet(self.working_dir)
honeypot.start()
while honeypot.server.server_port == 0: # wait until the server is ready
gevent.sleep(0)
port = honeypot.server.server_port
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# If we log in properly, this should raise no errors
client.connect('127.0.0.1', port=port, username='testuser', password='testpassword')
channel = client.invoke_shell()
<|code_end|>
. Use current file imports:
(import gevent.monkey
import paramiko
from hornet.main import Hornet
from hornet.tests.commands.base import BaseTestClass)
and context including class names, function names, or small code snippets from other files:
# Path: hornet/main.py
# class Hornet(object):
#
# def __init__(self, working_directory, vhost_create_fs=False):
# self.server = None
# self.handler = None
# self.server_greenlet = None
# self.session_q = gevent.queue.Queue()
# self.consumer = SessionConsumer(self.session_q)
# self.consumer_greenlet = None
# self.working_directory = working_directory
# self.config = self._load_config()
# self._vhost_create_fs = vhost_create_fs
# try:
# self.db_handler = DatabaseHandler(self.config)
# except Exception:
# logger.exception('Could not initialize database: %s', self.config.database)
#
# # Create virtual hosts
# self.vhosts = self._create_vhosts()
#
# def _load_config(self):
# config_path = os.path.join(self.working_directory, 'config.json')
# if not os.path.isfile(config_path):
# source = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# destination = config_path
# logger.info('Config file {} not found, copying default'.format(destination))
# shutil.copyfile(src=source, dst=destination)
# with open(config_path, 'r') as config_fp:
# config_params = json.load(config_fp)
# return Config(config_params)
#
# def _create_vhosts(self):
#
# # Create a directory for virtual filesystems, if it doesn't exist
# vhosts_path = os.path.join(self.working_directory, 'vhosts')
# if not os.path.isdir(vhosts_path):
# logger.info('Creating directory {} for virtual host filesystems'.format(vhosts_path))
# os.mkdir(vhosts_path)
#
# hosts = {}
# for host_params in self.config.vhost_params:
# h = VirtualHost(host_params, self.config.network, vhosts_path, create_fs=self._vhost_create_fs)
# hosts[h.hostname] = h
# return hosts
#
# def start(self):
# self.handler = SSHWrapper(self.vhosts, self.session_q, self.config, self.working_directory, self.db_handler)
# self.server = gevent.server.StreamServer((self.config.host, self.config.port),
# handle=self.handler.handle_session)
# self.server_greenlet = gevent.spawn(self.server.serve_forever)
# while self.server.server_port == 0:
# gevent.sleep(0) # Bad way of waiting, but can't think of anything right now.
# logger.info('SSH server listening on {}:{}'.format(self.server.server_host, self.server.server_port))
#
# self.consumer_greenlet = self.consumer.start()
# return [self.server_greenlet, self.consumer_greenlet]
#
# def stop(self):
# logging.debug('Stopping the server')
# self.server.stop()
# self.consumer.stop()
#
# Path: hornet/tests/commands/base.py
# class BaseTestClass(unittest.TestCase):
#
# def setUp(self):
# self.working_dir = tempfile.mkdtemp()
# test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# shutil.copyfile(test_config, os.path.join(self.working_dir, 'config.json'))
#
# def tearDown(self):
# shutil.rmtree(self.working_dir)
#
# def create_filesystem(self, honeypot):
# default_host = honeypot.vhosts[honeypot.config.default_hostname]
# default_host.filesystem.makedir('/etc', recreate=True)
# default_host.filesystem.makedir('/var', recreate=True)
# default_host.filesystem.makedir('/bin', recreate=True)
# default_host.filesystem.makedir('/.hidden', recreate=True)
# default_host.filesystem.makedir('/etc/init.d', recreate=True)
# default_host.filesystem.create('/etc/passwd')
# default_host.filesystem.create('/etc/.config')
# default_host.filesystem.create('/etc/sysctl.conf')
# default_host.filesystem.create('/.hidden/.rcconf')
# default_host.filesystem.create('/initrd.img')
. Output only the next line. | while not channel.recv_ready(): |
Predict the next line for this snippet: <|code_start|> self.assertTrue(lines[1].startswith('--'))
self.assertTrue('http://pathod.net/response_preview?spec=200:r' in lines[1])
self.assertEquals('Resolving pathod.net (pathod.net)... '
'failed: Name or service not known.', lines[2])
self.assertEquals('wget: unable to resolve host address \'pathod.net\'', lines[3])
self.assertTrue(next_prompt.endswith('$ '))
honeypot.stop()
def test_wget_bad_content_length(self):
""" Tests if 'wget http://pathod.net/response_preview?spec=200%3Ar%3Ah%22Content-Length
%22%3D%22%27unparsable%22' shows an error resolving """
honeypot = Hornet(self.working_dir)
honeypot.start()
while honeypot.server.server_port == 0: # wait until the server is ready
gevent.sleep(0)
port = honeypot.server.server_port
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# If we log in properly, this should raise no errors
client.connect('127.0.0.1', port=port, username='testuser', password='testpassword')
channel = client.invoke_shell()
while not channel.recv_ready():
gevent.sleep(0) # :-(
welcome = ''
while channel.recv_ready():
<|code_end|>
with the help of current file imports:
import gevent.monkey
import os
import unittest
import paramiko
import hornet
from hornet.main import Hornet
from hornet.tests.commands.base import BaseTestClass
and context from other files:
# Path: hornet/main.py
# class Hornet(object):
#
# def __init__(self, working_directory, vhost_create_fs=False):
# self.server = None
# self.handler = None
# self.server_greenlet = None
# self.session_q = gevent.queue.Queue()
# self.consumer = SessionConsumer(self.session_q)
# self.consumer_greenlet = None
# self.working_directory = working_directory
# self.config = self._load_config()
# self._vhost_create_fs = vhost_create_fs
# try:
# self.db_handler = DatabaseHandler(self.config)
# except Exception:
# logger.exception('Could not initialize database: %s', self.config.database)
#
# # Create virtual hosts
# self.vhosts = self._create_vhosts()
#
# def _load_config(self):
# config_path = os.path.join(self.working_directory, 'config.json')
# if not os.path.isfile(config_path):
# source = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# destination = config_path
# logger.info('Config file {} not found, copying default'.format(destination))
# shutil.copyfile(src=source, dst=destination)
# with open(config_path, 'r') as config_fp:
# config_params = json.load(config_fp)
# return Config(config_params)
#
# def _create_vhosts(self):
#
# # Create a directory for virtual filesystems, if it doesn't exist
# vhosts_path = os.path.join(self.working_directory, 'vhosts')
# if not os.path.isdir(vhosts_path):
# logger.info('Creating directory {} for virtual host filesystems'.format(vhosts_path))
# os.mkdir(vhosts_path)
#
# hosts = {}
# for host_params in self.config.vhost_params:
# h = VirtualHost(host_params, self.config.network, vhosts_path, create_fs=self._vhost_create_fs)
# hosts[h.hostname] = h
# return hosts
#
# def start(self):
# self.handler = SSHWrapper(self.vhosts, self.session_q, self.config, self.working_directory, self.db_handler)
# self.server = gevent.server.StreamServer((self.config.host, self.config.port),
# handle=self.handler.handle_session)
# self.server_greenlet = gevent.spawn(self.server.serve_forever)
# while self.server.server_port == 0:
# gevent.sleep(0) # Bad way of waiting, but can't think of anything right now.
# logger.info('SSH server listening on {}:{}'.format(self.server.server_host, self.server.server_port))
#
# self.consumer_greenlet = self.consumer.start()
# return [self.server_greenlet, self.consumer_greenlet]
#
# def stop(self):
# logging.debug('Stopping the server')
# self.server.stop()
# self.consumer.stop()
#
# Path: hornet/tests/commands/base.py
# class BaseTestClass(unittest.TestCase):
#
# def setUp(self):
# self.working_dir = tempfile.mkdtemp()
# test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# shutil.copyfile(test_config, os.path.join(self.working_dir, 'config.json'))
#
# def tearDown(self):
# shutil.rmtree(self.working_dir)
#
# def create_filesystem(self, honeypot):
# default_host = honeypot.vhosts[honeypot.config.default_hostname]
# default_host.filesystem.makedir('/etc', recreate=True)
# default_host.filesystem.makedir('/var', recreate=True)
# default_host.filesystem.makedir('/bin', recreate=True)
# default_host.filesystem.makedir('/.hidden', recreate=True)
# default_host.filesystem.makedir('/etc/init.d', recreate=True)
# default_host.filesystem.create('/etc/passwd')
# default_host.filesystem.create('/etc/.config')
# default_host.filesystem.create('/etc/sysctl.conf')
# default_host.filesystem.create('/.hidden/.rcconf')
# default_host.filesystem.create('/initrd.img')
, which may contain function names, class names, or code. Output only the next line. | welcome += channel.recv(1) |
Predict the next line for this snippet: <|code_start|># !/usr/bin/env python
#
# Hornet - SSH Honeypot
#
# Copyright (C) 2015 Aniket Panse <aniketpanse@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
gevent.monkey.patch_all()
class HornetTests(unittest.TestCase):
def setUp(self):
self.working_dir = tempfile.mkdtemp()
<|code_end|>
with the help of current file imports:
import gevent.monkey
import os
import shutil
import unittest
import tempfile
import paramiko
import hornet
from hornet.main import Hornet
and context from other files:
# Path: hornet/main.py
# class Hornet(object):
#
# def __init__(self, working_directory, vhost_create_fs=False):
# self.server = None
# self.handler = None
# self.server_greenlet = None
# self.session_q = gevent.queue.Queue()
# self.consumer = SessionConsumer(self.session_q)
# self.consumer_greenlet = None
# self.working_directory = working_directory
# self.config = self._load_config()
# self._vhost_create_fs = vhost_create_fs
# try:
# self.db_handler = DatabaseHandler(self.config)
# except Exception:
# logger.exception('Could not initialize database: %s', self.config.database)
#
# # Create virtual hosts
# self.vhosts = self._create_vhosts()
#
# def _load_config(self):
# config_path = os.path.join(self.working_directory, 'config.json')
# if not os.path.isfile(config_path):
# source = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
# destination = config_path
# logger.info('Config file {} not found, copying default'.format(destination))
# shutil.copyfile(src=source, dst=destination)
# with open(config_path, 'r') as config_fp:
# config_params = json.load(config_fp)
# return Config(config_params)
#
# def _create_vhosts(self):
#
# # Create a directory for virtual filesystems, if it doesn't exist
# vhosts_path = os.path.join(self.working_directory, 'vhosts')
# if not os.path.isdir(vhosts_path):
# logger.info('Creating directory {} for virtual host filesystems'.format(vhosts_path))
# os.mkdir(vhosts_path)
#
# hosts = {}
# for host_params in self.config.vhost_params:
# h = VirtualHost(host_params, self.config.network, vhosts_path, create_fs=self._vhost_create_fs)
# hosts[h.hostname] = h
# return hosts
#
# def start(self):
# self.handler = SSHWrapper(self.vhosts, self.session_q, self.config, self.working_directory, self.db_handler)
# self.server = gevent.server.StreamServer((self.config.host, self.config.port),
# handle=self.handler.handle_session)
# self.server_greenlet = gevent.spawn(self.server.serve_forever)
# while self.server.server_port == 0:
# gevent.sleep(0) # Bad way of waiting, but can't think of anything right now.
# logger.info('SSH server listening on {}:{}'.format(self.server.server_host, self.server.server_port))
#
# self.consumer_greenlet = self.consumer.start()
# return [self.server_greenlet, self.consumer_greenlet]
#
# def stop(self):
# logging.debug('Stopping the server')
# self.server.stop()
# self.consumer.stop()
, which may contain function names, class names, or code. Output only the next line. | test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json') |
Next line prediction: <|code_start|> self.shell.writeline('HTTP request sent, awaiting response... 200 OK')
self.shell.writeline('Length: {} ({}) [{}]'.format(
self.total_size,
human_readable(self.total_size),
self.content_type
))
self.shell.writeline('Saving to:\'{}\''.format(self.outputfile))
self.shell.writeline('')
def _render_progressbar(self):
while not self.currently_downloaded == self.total_size:
self.shell.updateline(self._get_progressbar())
gevent.sleep(0.3)
# Update one last time to show 100% progress
self.shell.updateline('{} in {:.2f}s'.format(self._get_progressbar(), time.clock()-self.start_time))
self.shell.writeline('')
def _get_progressbar(self):
percent = self.currently_downloaded / float(self.total_size)
done = int(percent * self.progressbar_size)
not_done = self.progressbar_size - done
elapsed_time = time.clock() - self.start_time
speed = human_readable(self.currently_downloaded / elapsed_time, suffix='B/s')
return self.PROGRESS_BAR.format(
percent,
(done - 1) * '=',
not_done * ' ',
self.total_size,
speed
<|code_end|>
. Use current file imports:
(import logging
import os
import random
import urlparse
import gevent
import requests
import requests.exceptions
import time
import socket
from contextlib import closing
from hornet.common.helpers import human_readable)
and context including class names, function names, or small code snippets from other files:
# Path: hornet/common/helpers.py
# def human_readable(num, suffix=''):
# for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
# if abs(num) < 1024.0:
# return "%3.1f%s%s" % (num, unit, suffix)
# num /= 1024.0
# return "%.1f%s%s" % (num, 'Yi', suffix)
. Output only the next line. | ) |
Predict the next line for this snippet: <|code_start|># it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
gevent.monkey.patch_all()
class HornetTests(unittest.TestCase):
def setUp(self):
self.working_dir = tempfile.mkdtemp()
test_config = os.path.join(os.path.dirname(hornet.__file__), 'data', 'default_config.json')
shutil.copyfile(test_config, os.path.join(self.working_dir, 'config.json'))
def tearDown(self):
shutil.rmtree(self.working_dir)
def test_random_choice_dict(self):
test_dict = {
<|code_end|>
with the help of current file imports:
import gevent.monkey
import os
import shutil
import unittest
import tempfile
import hornet
from hornet.common.helpers import get_random_item
and context from other files:
# Path: hornet/common/helpers.py
# def get_random_item(collection):
# if isinstance(collection, dict):
# all_keys = list(collection.keys())
# r = random.choice(all_keys)
# return collection[r]
# elif isinstance(collection, list):
# return random.choice(collection)
, which may contain function names, class names, or code. Output only the next line. | 'a': 1, |
Based on the snippet: <|code_start|> account.followers_count = user['followers_count']
# If the endorser is set, update it.
if endorser is not None:
account.endorser = endorser
account.save()
except Account.DoesNotExist:
# Create the endorser, if not specified as an argument.
if endorser is None:
endorser = Endorser.objects.create(
name=user['name'],
description=user['description'],
url=url,
is_personal=True,
max_followers=user['followers_count'],
missing_image=False,
)
else:
endorser.missing_image = False
# We may need to update max_followers for the endorser.
if user['followers_count'] > endorser.max_followers:
endorser.max_followers = user['followers_count']
endorser.save()
account = self.create(
twitter_id=twitter_id,
screen_name=user['screen_name'],
<|code_end|>
, predict the immediate next line with the help of imports:
import shutil
import requests
import boto
from django.db import models
from django.urls import reverse
from endorsements.utils import get_twitter_client
and context (classes, functions, sometimes code) from other files:
# Path: endorsements/utils.py
# def get_twitter_client():
# return twitter.Twitter(
# auth=twitter.OAuth(
# os.environ.get('ACCESS_TOKEN'),
# os.environ.get('ACCESS_TOKEN_SECRET'),
# os.environ.get('CONSUMER_KEY'),
# os.environ.get('CONSUMER_SECRET'),
# )
# )
. Output only the next line. | name=user['name'], |
Given the following code snippet before the placeholder: <|code_start|>
class TestShortenFilter(TestCase):
expected = {
12345678: '12M',
1234567: '1.2M',
<|code_end|>
, predict the next line using imports from the current file:
from django.test import TestCase
from endorsements.templatetags.endorsement_extras import shorten
and context including class names, function names, and sometimes code from other files:
# Path: endorsements/templatetags/endorsement_extras.py
# @register.filter(name='shorten')
# def shorten(n):
# """Takes in an integer (over a thousand) and shortens it by using K or M.
# Examples:
# * 1,234,567 becomes 1.2M
# * 12,345 becomes 12K
# * 99,999,999 becomes 99M
# """
# if n > 10000000:
# return '%dM' % round(n / 1000000)
# elif n > 1000000:
# # It's between 1 and 10 million. Include the decimal point.
# return '%.1fM' % round(n / 1000000., 2)
# elif n > 1000:
# pre_decimal = int(round(n / 1000))
# return "%dK" % pre_decimal
# else:
# return str(n)
. Output only the next line. | 123456: '123K', |
Predict the next line for this snippet: <|code_start|>
URL = 'https://en.wikipedia.org/w/api.php?action=parse&page={slug}&prop=text&format=json§ion={section}'
SLUG = 'United_States_presidential_election,_2016'
SECTION = 36
class Command(BaseCommand):
help = 'Bulk import all the election results by state'
def add_arguments(self, parser):
parser.add_argument(
'--create',
action='store_true',
dest='create',
default=False,
help="Creates everything (otherwise, it's a dry run)",
<|code_end|>
with the help of current file imports:
from datetime import datetime
from BeautifulSoup import BeautifulSoup as BS
from django.core.management.base import BaseCommand, CommandError
from endorsements.models import Tag, Candidate
from wikipedia.models import BulkImport, ImportedResult
import requests
and context from other files:
# Path: endorsements/models.py
# class Tag(models.Model):
# name = models.CharField(max_length=50)
# description = models.TextField(blank=True)
# is_personal = models.BooleanField(default=True)
# category = models.ForeignKey(Category, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# class Meta:
# ordering = ['name']
#
# def get_mode(self):
# allow_personal = self.category.allow_personal
# allow_org = self.category.allow_org
# if allow_personal and allow_org:
# return 'none'
# elif allow_personal:
# return 'personal'
# else:
# return 'organization'
#
# class Candidate(models.Model):
# endorser_link = models.OneToOneField(Endorser)
# name = models.CharField(max_length=50)
# description = models.TextField()
# color = models.CharField(max_length=6)
# rgb = models.CharField(max_length=13)
# still_running = models.BooleanField(default=False)
# position = models.OneToOneField('Position', blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# Path: wikipedia/models.py
# class BulkImport(models.Model):
# slug = models.SlugField(max_length=255)
# created_at = models.DateTimeField(auto_now_add=True)
# text = models.TextField()
#
# def __unicode__(self):
# return str(self.created_at)
#
# class ImportedResult(models.Model):
# bulk_import = models.ForeignKey(BulkImport)
# tag = models.ForeignKey(Tag)
# candidate = models.ForeignKey(Candidate)
# count = models.PositiveIntegerField(default=0)
# percent = models.DecimalField(max_digits=4, decimal_places=2)
#
# class Meta:
# unique_together = ('tag', 'candidate')
, which may contain function names, class names, or code. Output only the next line. | ) |
Predict the next line for this snippet: <|code_start|>
def add_arguments(self, parser):
parser.add_argument(
'--create',
action='store_true',
dest='create',
default=False,
help="Creates everything (otherwise, it's a dry run)",
)
def handle(self, *args, **options):
url = URL.format(slug=SLUG, section=SECTION)
response = requests.get(url)
data = response.json()
text = data['parse']['text']['*']
soup = BS(text)
results = {}
for i, table_row in enumerate(soup.findAll('tr')):
if i < 5 or i > 60:
continue
table_cells = table_row.findAll('td')
assert len(table_cells) == 18
state = table_cells[0].find('a').string
if state == 'Washington, D.C.':
state = 'D.C.'
elif ',' in state:
# It's one of the districts. ignore.
<|code_end|>
with the help of current file imports:
from datetime import datetime
from BeautifulSoup import BeautifulSoup as BS
from django.core.management.base import BaseCommand, CommandError
from endorsements.models import Tag, Candidate
from wikipedia.models import BulkImport, ImportedResult
import requests
and context from other files:
# Path: endorsements/models.py
# class Tag(models.Model):
# name = models.CharField(max_length=50)
# description = models.TextField(blank=True)
# is_personal = models.BooleanField(default=True)
# category = models.ForeignKey(Category, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# class Meta:
# ordering = ['name']
#
# def get_mode(self):
# allow_personal = self.category.allow_personal
# allow_org = self.category.allow_org
# if allow_personal and allow_org:
# return 'none'
# elif allow_personal:
# return 'personal'
# else:
# return 'organization'
#
# class Candidate(models.Model):
# endorser_link = models.OneToOneField(Endorser)
# name = models.CharField(max_length=50)
# description = models.TextField()
# color = models.CharField(max_length=6)
# rgb = models.CharField(max_length=13)
# still_running = models.BooleanField(default=False)
# position = models.OneToOneField('Position', blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# Path: wikipedia/models.py
# class BulkImport(models.Model):
# slug = models.SlugField(max_length=255)
# created_at = models.DateTimeField(auto_now_add=True)
# text = models.TextField()
#
# def __unicode__(self):
# return str(self.created_at)
#
# class ImportedResult(models.Model):
# bulk_import = models.ForeignKey(BulkImport)
# tag = models.ForeignKey(Tag)
# candidate = models.ForeignKey(Candidate)
# count = models.PositiveIntegerField(default=0)
# percent = models.DecimalField(max_digits=4, decimal_places=2)
#
# class Meta:
# unique_together = ('tag', 'candidate')
, which may contain function names, class names, or code. Output only the next line. | continue |
Based on the snippet: <|code_start|> response = requests.get(url)
data = response.json()
text = data['parse']['text']['*']
soup = BS(text)
results = {}
for i, table_row in enumerate(soup.findAll('tr')):
if i < 5 or i > 60:
continue
table_cells = table_row.findAll('td')
assert len(table_cells) == 18
state = table_cells[0].find('a').string
if state == 'Washington, D.C.':
state = 'D.C.'
elif ',' in state:
# It's one of the districts. ignore.
continue
elif '(' in state:
state = state.partition('(')[0]
print state
stats = {
'Hillary Clinton': {
'count': int(
(table_cells[2].string or '0').replace(',', '')
),
'percent': float(
<|code_end|>
, predict the immediate next line with the help of imports:
from datetime import datetime
from BeautifulSoup import BeautifulSoup as BS
from django.core.management.base import BaseCommand, CommandError
from endorsements.models import Tag, Candidate
from wikipedia.models import BulkImport, ImportedResult
import requests
and context (classes, functions, sometimes code) from other files:
# Path: endorsements/models.py
# class Tag(models.Model):
# name = models.CharField(max_length=50)
# description = models.TextField(blank=True)
# is_personal = models.BooleanField(default=True)
# category = models.ForeignKey(Category, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# class Meta:
# ordering = ['name']
#
# def get_mode(self):
# allow_personal = self.category.allow_personal
# allow_org = self.category.allow_org
# if allow_personal and allow_org:
# return 'none'
# elif allow_personal:
# return 'personal'
# else:
# return 'organization'
#
# class Candidate(models.Model):
# endorser_link = models.OneToOneField(Endorser)
# name = models.CharField(max_length=50)
# description = models.TextField()
# color = models.CharField(max_length=6)
# rgb = models.CharField(max_length=13)
# still_running = models.BooleanField(default=False)
# position = models.OneToOneField('Position', blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# Path: wikipedia/models.py
# class BulkImport(models.Model):
# slug = models.SlugField(max_length=255)
# created_at = models.DateTimeField(auto_now_add=True)
# text = models.TextField()
#
# def __unicode__(self):
# return str(self.created_at)
#
# class ImportedResult(models.Model):
# bulk_import = models.ForeignKey(BulkImport)
# tag = models.ForeignKey(Tag)
# candidate = models.ForeignKey(Candidate)
# count = models.PositiveIntegerField(default=0)
# percent = models.DecimalField(max_digits=4, decimal_places=2)
#
# class Meta:
# unique_together = ('tag', 'candidate')
. Output only the next line. | (table_cells[3].string or '0').strip('%') |
Continue the code snippet: <|code_start|> data = response.json()
text = data['parse']['text']['*']
soup = BS(text)
results = {}
for i, table_row in enumerate(soup.findAll('tr')):
if i < 5 or i > 60:
continue
table_cells = table_row.findAll('td')
assert len(table_cells) == 18
state = table_cells[0].find('a').string
if state == 'Washington, D.C.':
state = 'D.C.'
elif ',' in state:
# It's one of the districts. ignore.
continue
elif '(' in state:
state = state.partition('(')[0]
print state
stats = {
'Hillary Clinton': {
'count': int(
(table_cells[2].string or '0').replace(',', '')
),
'percent': float(
(table_cells[3].string or '0').strip('%')
<|code_end|>
. Use current file imports:
from datetime import datetime
from BeautifulSoup import BeautifulSoup as BS
from django.core.management.base import BaseCommand, CommandError
from endorsements.models import Tag, Candidate
from wikipedia.models import BulkImport, ImportedResult
import requests
and context (classes, functions, or code) from other files:
# Path: endorsements/models.py
# class Tag(models.Model):
# name = models.CharField(max_length=50)
# description = models.TextField(blank=True)
# is_personal = models.BooleanField(default=True)
# category = models.ForeignKey(Category, null=True, blank=True)
#
# def __unicode__(self):
# return self.name
#
# class Meta:
# ordering = ['name']
#
# def get_mode(self):
# allow_personal = self.category.allow_personal
# allow_org = self.category.allow_org
# if allow_personal and allow_org:
# return 'none'
# elif allow_personal:
# return 'personal'
# else:
# return 'organization'
#
# class Candidate(models.Model):
# endorser_link = models.OneToOneField(Endorser)
# name = models.CharField(max_length=50)
# description = models.TextField()
# color = models.CharField(max_length=6)
# rgb = models.CharField(max_length=13)
# still_running = models.BooleanField(default=False)
# position = models.OneToOneField('Position', blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# Path: wikipedia/models.py
# class BulkImport(models.Model):
# slug = models.SlugField(max_length=255)
# created_at = models.DateTimeField(auto_now_add=True)
# text = models.TextField()
#
# def __unicode__(self):
# return str(self.created_at)
#
# class ImportedResult(models.Model):
# bulk_import = models.ForeignKey(BulkImport)
# tag = models.ForeignKey(Tag)
# candidate = models.ForeignKey(Candidate)
# count = models.PositiveIntegerField(default=0)
# percent = models.DecimalField(max_digits=4, decimal_places=2)
#
# class Meta:
# unique_together = ('tag', 'candidate')
. Output only the next line. | ), |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.