Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Given the code snippet: <|code_start|>
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
url(r'^created/$', TemplateView.as_view(
template_name='pizzagigi/pizza_created.html'), name='created'),
url(r'^detail/(?P<pk>[0-9]*)$', PizzaDetailView.as_view(), name='detail'),
url(r'^update/(?P<pk>[0-9]*)$', PizzaUpdateView.as_view(), name='update'),
url(r'^updated/$', TemplateView.as_view(
template_name='pizzagigi/pizza_updated.html'), name='updated'),
url(r'^delete/(?P<pk>[0-9]*)$', PizzaDeleteView.as_view(), name='delete'),
<|code_end|>
, generate the next line using the imports in this file:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView
)
and context (functions, classes, or occasionally code) from other files:
# Path: test_projects/django18/pizzagigi/views.py
# class PizzaCreateView(CreateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:created')
#
# class PizzaDeleteView(DeleteView):
#
# model = Pizza
# success_url = reverse_lazy('pizza:deleted')
#
# def get_success_url(self):
# return force_text(self.success_url)
#
# class PizzaDetailView(DetailView):
#
# model = Pizza
#
# class PizzaListView(ListView):
#
# queryset = Pizza.objects.order_by('-id')
# context_object_name = 'pizzas'
# paginate_by = 10
#
# class PizzaUpdateView(UpdateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:updated')
. Output only the next line. | url(r'^deleted/$', TemplateView.as_view( |
Predict the next line for this snippet: <|code_start|>
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
<|code_end|>
with the help of current file imports:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView
)
and context from other files:
# Path: test_projects/django18/pizzagigi/views.py
# class PizzaCreateView(CreateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:created')
#
# class PizzaDeleteView(DeleteView):
#
# model = Pizza
# success_url = reverse_lazy('pizza:deleted')
#
# def get_success_url(self):
# return force_text(self.success_url)
#
# class PizzaDetailView(DetailView):
#
# model = Pizza
#
# class PizzaListView(ListView):
#
# queryset = Pizza.objects.order_by('-id')
# context_object_name = 'pizzas'
# paginate_by = 10
#
# class PizzaUpdateView(UpdateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:updated')
, which may contain function names, class names, or code. Output only the next line. | url(r'^created/$', TemplateView.as_view( |
Predict the next line for this snippet: <|code_start|>
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
url(r'^created/$', TemplateView.as_view(
<|code_end|>
with the help of current file imports:
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView
)
and context from other files:
# Path: test_projects/django18/pizzagigi/views.py
# class PizzaCreateView(CreateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:created')
#
# class PizzaDeleteView(DeleteView):
#
# model = Pizza
# success_url = reverse_lazy('pizza:deleted')
#
# def get_success_url(self):
# return force_text(self.success_url)
#
# class PizzaDetailView(DetailView):
#
# model = Pizza
#
# class PizzaListView(ListView):
#
# queryset = Pizza.objects.order_by('-id')
# context_object_name = 'pizzas'
# paginate_by = 10
#
# class PizzaUpdateView(UpdateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:updated')
, which may contain function names, class names, or code. Output only the next line. | template_name='pizzagigi/pizza_created.html'), name='created'), |
Next line prediction: <|code_start|>
urlpatterns = patterns('', # NOQA
url(r'^$', PizzaListView.as_view(), name='list'),
url(r'^create/$', PizzaCreateView.as_view(), name='create'),
<|code_end|>
. Use current file imports:
(from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from .views import (
PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView,
PizzaUpdateView
))
and context including class names, function names, or small code snippets from other files:
# Path: test_projects/django18/pizzagigi/views.py
# class PizzaCreateView(CreateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:created')
#
# class PizzaDeleteView(DeleteView):
#
# model = Pizza
# success_url = reverse_lazy('pizza:deleted')
#
# def get_success_url(self):
# return force_text(self.success_url)
#
# class PizzaDetailView(DetailView):
#
# model = Pizza
#
# class PizzaListView(ListView):
#
# queryset = Pizza.objects.order_by('-id')
# context_object_name = 'pizzas'
# paginate_by = 10
#
# class PizzaUpdateView(UpdateView):
#
# model = Pizza
# fields = ['toppings']
# success_url = reverse_lazy('pizza:updated')
. Output only the next line. | url(r'^created/$', TemplateView.as_view( |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
class MyMigrationTestCase(SouthMigrationTestCase):
start_migration = '0001_initial'
dest_migration = '0002_auto__add_field_chickenballs_dips'
django_application = 'suthern'
def test_field_survives_migration(self):
self.migrate_to_dest()
choice_1 = ChickenBalls.HONEY_MUSTARD
order = ChickenBalls()
order.dips = choice_1
<|code_end|>
with the help of current file imports:
from .migration_helpers import SouthMigrationTestCase
from .models import ChickenBalls
and context from other files:
# Path: test_projects/django14/suthern/migration_helpers.py
# class SouthMigrationTestCase(TransactionTestCase):
# """A Test case for testing South migrations."""
#
# # Source:
# # https://micknelson.wordpress.com/2013/03/01/testing-django-migrations/
#
# # These must be defined by subclasses.
# start_migration = None
# dest_migration = None
# django_application = None
#
# def setUp(self):
# super(SouthMigrationTestCase, self).setUp()
# migrations = Migrations(self.django_application)
# self.start_orm = migrations[self.start_migration].orm()
# self.dest_orm = migrations[self.dest_migration].orm()
#
# # Ensure the migration history is up-to-date with a fake migration.
# # The other option would be to use the south setting for these tests
# # so that the migrations are used to setup the test db.
# call_command('migrate', self.django_application, fake=True,
# verbosity=0)
# # Then migrate back to the start migration.
# call_command('migrate', self.django_application, self.start_migration,
# verbosity=0)
#
# def tearDown(self):
# # Leave the db in the final state so that the test runner doesn't
# # error when truncating the database.
# call_command('migrate', self.django_application, verbosity=0)
#
# def migrate_to_dest(self):
# call_command('migrate', self.django_application, self.dest_migration,
# verbosity=0)
#
# Path: test_projects/django14/suthern/models.py
# class ChickenBalls(models.Model):
# """ChickenBalls is used for South migration testing"""
#
# SUICIDE = 's'
# HOT = 'h'
# HOME_STYLE = 'H'
# CAJUN = 'c'
# JERK = 'j'
# GATOR = 'g'
# FLAVOUR_CHOICES = (
# (_('Hot & Spicy'), (
# (SUICIDE, _('Suicide hot')),
# (HOT, _('Hot hot sauce')),
# (CAJUN, _('Cajun sauce')),
# (JERK, _('Jerk sauce')))),
# (_('Traditional'), (
# (HOME_STYLE, _('Homestyle')),
# (GATOR, _('Gator flavour')))),
# )
# flavour = SelectMultipleField(
# blank=True,
# include_blank=False,
# max_length=5,
# max_choices=2,
# choices=FLAVOUR_CHOICES
# )
# RANCH = 'r'
# HONEY_MUSTARD = 'h'
# BBQ = 'b'
# DIP_CHOICES = (
# (RANCH, _('Ranch')),
# (HONEY_MUSTARD, _('Honey mustard')),
# (BBQ, _('BBQ')),
# )
# dips = SelectMultipleField(
# blank=True,
# default='',
# include_blank=False,
# max_length=6,
# max_choices=3,
# choices=DIP_CHOICES
# )
#
# def __str__(self):
# return "pk=%s" % force_text(self.pk)
#
# def get_absolute_url(self):
# return reverse('ftw:detail', args=[self.pk])
, which may contain function names, class names, or code. Output only the next line. | order.save() |
Here is a snippet: <|code_start|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
try:
except ImportError:
def deconstructible(x):
return x
@deconstructible
class MaxChoicesValidator(validators.BaseValidator):
message = ungettext_lazy(
<|code_end|>
. Write the next line using the current file imports:
from django.core import validators
from django.utils.encoding import force_text
from django.utils.translation import ungettext_lazy
from django.utils.deconstruct import deconstructible
from .codecs import encode_list_to_csv
and context from other files:
# Path: select_multiple_field/codecs.py
# def encode_list_to_csv(decoded):
# """
# Encodes a Python list to a delimiter separated string
#
# Note: This sorts the list lexicographically
# """
# delimiter = getattr(
# settings, 'SELECTMULTIPLEFIELD_DELIMITER', DEFAULT_DELIMITER)
# decoded = sorted(set(decoded))
# return delimiter.join(decoded)
, which may include functions, classes, or code. Output only the next line. | 'Ensure this value has at most %(limit_value)d choice (it has %(show_value)d).', # NOQA |
Continue the code snippet: <|code_start|> reverse('pizza:create'),
urlencode(MultiValueDict(data), doseq=True),
content_type='application/x-www-form-urlencoded'
)
self.assertEqual(response.status_code, 302)
self.assertRedirects(
response,
'http://testserver' + reverse('pizza:created'))
p = Pizza.objects.all()[0]
self.assertIn(Pizza.MOZZARELLA, p.toppings)
self.assertIn(Pizza.PANCETTA, p.toppings)
class PizzaDetailViewTestCase(TestCase):
def setUp(self):
self.pizza = Pizza(toppings=[Pizza.EGG])
self.pizza.save()
def test_view(self):
response = self.client.get(
reverse('pizza:detail', args=[self.pizza.id]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['object'], self.pizza)
class PizzaUpdateViewTestCase(TestCase):
def setUp(self):
self.pizza = Pizza(toppings=[Pizza.MUSHROOMS, Pizza.TOMATO])
<|code_end|>
. Use current file imports:
import json
from django.core import serializers
from django.core.urlresolvers import reverse
from django.test import SimpleTestCase, TestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import urlencode
from .models import Pizza, show_topping
and context (classes, functions, or code) from other files:
# Path: test_projects/django18/pizzagigi/models.py
# class Pizza(models.Model):
# """Pizza demonstrates minimal use-case"""
#
# ANCHOVIES = 'a'
# BLACK_OLIVES = 'b'
# CHEDDAR_CHEESE = 'c'
# EGG = 'e'
# PANCETTA = 'pk'
# PEPPERONI = 'p'
# PROSCIUTTO_CRUDO = 'P'
# MOZZARELLA = 'm'
# MUSHROOMS = 'M'
# TOMATO = 't'
# TOPPING_CHOICES = (
# (ANCHOVIES, _('Anchovies')),
# (BLACK_OLIVES, _('Black olives')),
# (CHEDDAR_CHEESE, _('Cheddar cheese')),
# (EGG, _('Eggs')),
# (PANCETTA, _('Pancetta')),
# (PEPPERONI, _('Pepperoni')),
# (PROSCIUTTO_CRUDO, _('Prosciutto crudo')),
# (MOZZARELLA, _('Mozzarella')),
# (MUSHROOMS, _('Mushrooms')),
# (TOMATO, _('Tomato')),
# )
#
# toppings = SelectMultipleField(
# max_length=10,
# choices=TOPPING_CHOICES
# )
#
# def get_toppings(self):
# if self.toppings:
# keys_choices = self.toppings
# return '%s' % (', '.join(filter(bool, keys_choices)))
# get_toppings.short_description = _('Toppings')
#
# def __str__(self):
# return "pk=%s" % force_text(self.pk)
#
# def get_absolute_url(self):
# return reverse('pizza:detail', args=[self.pk])
#
# def show_topping(ingredient):
# """
# Decode topping to full name
# """
# decoder = dict(Pizza.TOPPING_CHOICES)
# return force_text(decoder[ingredient])
. Output only the next line. | self.pizza.save() |
Continue the code snippet: <|code_start|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
class PizzaListViewTestCase(TestCase):
def test_no_pizzas(self):
p = Pizza.objects.all()
self.assertEqual(len(p), 0, 'Test requires no pizzas')
response = self.client.get(reverse('pizza:list'))
self.assertEqual(response.status_code, 200)
self.assertTrue('No pizzas found' in response.content.decode('utf-8'))
def test_many_pizzas(self):
NUM_PIZZAS = 30
pizzas = []
for n in range(NUM_PIZZAS):
p = Pizza.objects.create(toppings=[Pizza.PEPPERONI])
pizzas.append(p)
<|code_end|>
. Use current file imports:
import json
from django.core import serializers
from django.core.urlresolvers import reverse
from django.test import SimpleTestCase, TestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import urlencode
from .models import Pizza, show_topping
and context (classes, functions, or code) from other files:
# Path: test_projects/django18/pizzagigi/models.py
# class Pizza(models.Model):
# """Pizza demonstrates minimal use-case"""
#
# ANCHOVIES = 'a'
# BLACK_OLIVES = 'b'
# CHEDDAR_CHEESE = 'c'
# EGG = 'e'
# PANCETTA = 'pk'
# PEPPERONI = 'p'
# PROSCIUTTO_CRUDO = 'P'
# MOZZARELLA = 'm'
# MUSHROOMS = 'M'
# TOMATO = 't'
# TOPPING_CHOICES = (
# (ANCHOVIES, _('Anchovies')),
# (BLACK_OLIVES, _('Black olives')),
# (CHEDDAR_CHEESE, _('Cheddar cheese')),
# (EGG, _('Eggs')),
# (PANCETTA, _('Pancetta')),
# (PEPPERONI, _('Pepperoni')),
# (PROSCIUTTO_CRUDO, _('Prosciutto crudo')),
# (MOZZARELLA, _('Mozzarella')),
# (MUSHROOMS, _('Mushrooms')),
# (TOMATO, _('Tomato')),
# )
#
# toppings = SelectMultipleField(
# max_length=10,
# choices=TOPPING_CHOICES
# )
#
# def get_toppings(self):
# if self.toppings:
# keys_choices = self.toppings
# return '%s' % (', '.join(filter(bool, keys_choices)))
# get_toppings.short_description = _('Toppings')
#
# def __str__(self):
# return "pk=%s" % force_text(self.pk)
#
# def get_absolute_url(self):
# return reverse('pizza:detail', args=[self.pk])
#
# def show_topping(ingredient):
# """
# Decode topping to full name
# """
# decoder = dict(Pizza.TOPPING_CHOICES)
# return force_text(decoder[ingredient])
. Output only the next line. | self.assertEqual(len(pizzas), NUM_PIZZAS, 'Test requires pizzas') |
Predict the next line after this snippet: <|code_start|> response,
'http://testserver' + reverse('ftw:created'))
p = ChickenWings.objects.all()[0]
self.assertIn(ChickenWings.JERK, p.flavour)
def test_creation_two_choices(self):
data = {
'flavour': [ChickenWings.SUICIDE, ChickenWings.BOURBON]
}
response = self.client.post(
reverse('ftw:create'),
urlencode(MultiValueDict(data), doseq=True),
content_type='application/x-www-form-urlencoded'
)
self.assertRedirects(
response,
'http://testserver' + reverse('ftw:created'))
p = ChickenWings.objects.all()[0]
self.assertIn(ChickenWings.SUICIDE, p.flavour)
self.assertIn(ChickenWings.BOURBON, p.flavour)
def test_creation_too_many_choices(self):
data = {
'flavour': [
ChickenWings.CAJUN, ChickenWings.BOURBON, ChickenWings.MILD]
}
response = self.client.post(
reverse('ftw:create'),
urlencode(MultiValueDict(data), doseq=True),
content_type='application/x-www-form-urlencoded')
<|code_end|>
using the current file's imports:
from django.core.urlresolvers import reverse
from django.test import SimpleTestCase, TestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import urlencode
from .models import ChickenWings, show_flavour
and any relevant context from other files:
# Path: test_projects/django18/forthewing/models.py
# class ChickenWings(models.Model):
# """ChickenWings demonstrates optgroup usage and max_choices"""
#
# SUICIDE = 's'
# HOT = 'h'
# MEDIUM = 'm'
# MILD = 'M'
# CAJUN = 'c'
# JERK = 'j'
# HONEY_GARLIC = 'g'
# HONEY_BBQ = 'H'
# THAI = 't'
# BACON = 'b'
# BOURBON = 'B'
# FLAVOUR_CHOICES = (
# (_('Hot & Spicy'), (
# (SUICIDE, _('Suicide hot')),
# (HOT, _('Hot hot sauce')),
# (MEDIUM, _('Medium hot sauce')),
# (MILD, _('Mild hot sauce')),
# (CAJUN, _('Cajun sauce')),
# (JERK, _('Jerk sauce')))),
# (_('Sweets'), (
# (HONEY_GARLIC, _('Honey garlic')),
# (HONEY_BBQ, _('Honey barbeque')),
# (THAI, _('Thai sweet sauce')),
# (BACON, _('Messy bacon sauce')),
# (BOURBON, _('Bourbon whiskey barbeque')))),
# )
# flavour = SelectMultipleField(
# blank=True,
# include_blank=False,
# max_length=5,
# max_choices=2,
# choices=FLAVOUR_CHOICES
# )
#
# def __str__(self):
# return "pk=%s" % force_text(self.pk)
#
# def get_absolute_url(self):
# return reverse('ftw:detail', args=[self.pk])
#
# def show_flavour(flavour):
# """
# Decode flavour to full name
#
# This supports both plain choices and optgroup choices
# """
# decoder = dict()
# for c in ChickenWings.FLAVOUR_CHOICES:
# if isinstance(c[1], (tuple, list)):
# for k, v in c[1]:
# decoder[k] = v
# else:
# decoder[c[0]] = c[1]
#
# if flavour in decoder:
# return force_text(decoder[flavour])
# else:
# return force_text('')
. Output only the next line. | self.assertEqual(response.status_code, 200) |
Next line prediction: <|code_start|> p = ChickenWings.objects.all()
self.assertEqual(len(p), 0, 'Test requires no wings')
response = self.client.get(reverse('ftw:list'))
self.assertEqual(response.status_code, 200)
self.assertTrue(
'No chicken wings found' in response.content.decode('utf-8'))
def test_many_chickenwings(self):
NUM_WINGS = 30
wings = []
for n in range(NUM_WINGS):
p = ChickenWings.objects.create(flavour=[ChickenWings.HONEY_BBQ])
wings.append(p)
self.assertEqual(len(wings), NUM_WINGS, 'Test requires chicken wings')
response = self.client.get(reverse('ftw:list'))
self.assertEqual(response.status_code, 200)
self.assertTrue(
(ChickenWings.HONEY_BBQ) in response.content.decode('utf-8'))
class ChickenWingsCreateViewTestCase(TestCase):
def test_view(self):
response = self.client.get(reverse('ftw:create'))
self.assertEqual(response.status_code, 200)
def test_creation_single(self):
data = {
'flavour': [ChickenWings.JERK]
<|code_end|>
. Use current file imports:
(from django.core.urlresolvers import reverse
from django.test import SimpleTestCase, TestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import urlencode
from .models import ChickenWings, show_flavour)
and context including class names, function names, or small code snippets from other files:
# Path: test_projects/django18/forthewing/models.py
# class ChickenWings(models.Model):
# """ChickenWings demonstrates optgroup usage and max_choices"""
#
# SUICIDE = 's'
# HOT = 'h'
# MEDIUM = 'm'
# MILD = 'M'
# CAJUN = 'c'
# JERK = 'j'
# HONEY_GARLIC = 'g'
# HONEY_BBQ = 'H'
# THAI = 't'
# BACON = 'b'
# BOURBON = 'B'
# FLAVOUR_CHOICES = (
# (_('Hot & Spicy'), (
# (SUICIDE, _('Suicide hot')),
# (HOT, _('Hot hot sauce')),
# (MEDIUM, _('Medium hot sauce')),
# (MILD, _('Mild hot sauce')),
# (CAJUN, _('Cajun sauce')),
# (JERK, _('Jerk sauce')))),
# (_('Sweets'), (
# (HONEY_GARLIC, _('Honey garlic')),
# (HONEY_BBQ, _('Honey barbeque')),
# (THAI, _('Thai sweet sauce')),
# (BACON, _('Messy bacon sauce')),
# (BOURBON, _('Bourbon whiskey barbeque')))),
# )
# flavour = SelectMultipleField(
# blank=True,
# include_blank=False,
# max_length=5,
# max_choices=2,
# choices=FLAVOUR_CHOICES
# )
#
# def __str__(self):
# return "pk=%s" % force_text(self.pk)
#
# def get_absolute_url(self):
# return reverse('ftw:detail', args=[self.pk])
#
# def show_flavour(flavour):
# """
# Decode flavour to full name
#
# This supports both plain choices and optgroup choices
# """
# decoder = dict()
# for c in ChickenWings.FLAVOUR_CHOICES:
# if isinstance(c[1], (tuple, list)):
# for k, v in c[1]:
# decoder[k] = v
# else:
# decoder[c[0]] = c[1]
#
# if flavour in decoder:
# return force_text(decoder[flavour])
# else:
# return force_text('')
. Output only the next line. | } |
Given the code snippet: <|code_start|>#-*- coding: utf-8 -*-
class UdpServer(threading.Thread):
RECV_SIZE = 262144
def __init__(self, port, ip="0.0.0.0"):
threading.Thread.__init__(self)
self.stopflag = False
self.ip, self.port = ip, port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._id = id(self.sock)
<|code_end|>
, generate the next line using the imports in this file:
import socket
import threading
import select
import signals
from log import logger
from SigObject import sigObject
from log import logger
from net import socktypes
and context (functions, classes, or occasionally code) from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | self.remoteAddr = None |
Given the following code snippet before the placeholder: <|code_start|> def getSockType(self):
return socktypes.UDP_SERVER
def isConnected(self):
return self.remoteAddr
def sendall(self, data):
if not self.remoteAddr:
logger.warning("no remote udp client")
return
if not isinstance(data, basestring):
return
n = self.sock.sendto(data, self.remoteAddr)
total = len(data)
if n < total:
logger.warning("not all data sent")
return n == total
def stop(self):
self.stopflag = True
def send(self, data):
self.sock.sendto()
def close(self):
logger.debug("udp server closed")
self.sock.close()
<|code_end|>
, predict the next line using imports from the current file:
import socket
import threading
import select
import signals
from log import logger
from SigObject import sigObject
from log import logger
from net import socktypes
and context including class names, function names, and sometimes code from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def run(self): |
Given snippet: <|code_start|>#-*- coding: utf-8 -*-
class UdpServer(threading.Thread):
RECV_SIZE = 262144
def __init__(self, port, ip="0.0.0.0"):
threading.Thread.__init__(self)
self.stopflag = False
self.ip, self.port = ip, port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import socket
import threading
import select
import signals
from log import logger
from SigObject import sigObject
from log import logger
from net import socktypes
and context:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
which might include code, classes, or functions. Output only the next line. | self._id = id(self.sock) |
Given the code snippet: <|code_start|>#-*- coding: utf-8 -*-
class TcpServerManager(ServerManager):
def __init__(self):
ServerManager.__init__(self)
<|code_end|>
, generate the next line using the imports in this file:
from log import logger
from ServerManager import ServerManager
from net import socktypes
and context (functions, classes, or occasionally code) from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def create(self, ip, port): |
Given the following code snippet before the placeholder: <|code_start|>#-*- coding: utf-8 -*-
class TcpServerManager(ServerManager):
def __init__(self):
ServerManager.__init__(self)
<|code_end|>
, predict the next line using imports from the current file:
from log import logger
from ServerManager import ServerManager
from net import socktypes
and context including class names, function names, and sometimes code from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def create(self, ip, port): |
Given the following code snippet before the placeholder: <|code_start|>#-*- coding: utf-8 -*-
class ServerManager(object):
def __init__(self):
self.serverDict = {}
def create_(self, ip, port, sockType):
# Check ip and port
server = None
if sockType == socktypes.TCP_SERVER:
server = TcpServer(port, ip)
elif sockType == socktypes.UDP_SERVER:
server = UdpServer(port, ip)
else:
logger.error("sockType not supported")
<|code_end|>
, predict the next line using imports from the current file:
from log import logger
from TcpServer import TcpServer
from UdpServer import UdpServer
from net import socktypes
and context including class names, function names, and sometimes code from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | return 0, "" |
Predict the next line for this snippet: <|code_start|> def __init__(self):
self.serverDict = {}
def create_(self, ip, port, sockType):
# Check ip and port
server = None
if sockType == socktypes.TCP_SERVER:
server = TcpServer(port, ip)
elif sockType == socktypes.UDP_SERVER:
server = UdpServer(port, ip)
else:
logger.error("sockType not supported")
return 0, ""
_id = server.getId()
if self.serverDict.has_key(_id):
logger.error("server already exists")
del server
return 0, ""
if not server.start_():
return 0, ""
self.serverDict[_id] = server
logger.info("server create ok")
return _id, server.getAddress()
def removeServer(self, _id):
server = self.serverDict.get(_id)
<|code_end|>
with the help of current file imports:
from log import logger
from TcpServer import TcpServer
from UdpServer import UdpServer
from net import socktypes
and context from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
, which may contain function names, class names, or code. Output only the next line. | if server is None: |
Continue the code snippet: <|code_start|> return self.getBaseItemBySocketType(socktypes.TCP_SERVER)
def getBaseTcpClientItem(self):
return self.getBaseItemBySocketType(socktypes.TCP_CLIENT_LOCAL)
def addSocketItem(self, _id, address, sockType, icon):
parentItem = self.getBaseItemBySocketType(sockType)
if not parentItem:
logger.error("parent sock item is None")
return
item = SockTreeItem(sockType, address, _id, icon)
self.sockItemDict[_id] = item
parentItem.addChild(item)
self.setItemExpanded(parentItem, True)
self.setCurrentItem(item)
def addTcpServer(self, _id, address):
self.addSocketItem(_id, address, socktypes.TCP_SERVER, config.TCP_SERVER_ICON)
def addUdpServer(self, _id, address):
self.addSocketItem(_id, address, socktypes.UDP_SERVER, config.UDP_SERVER_ICON)
def addLocalTcpClient(self, _id, address):
self.addSocketItem(_id, address, socktypes.TCP_CLIENT_LOCAL, config.TCP_CLIENT_ICON_LOCAL)
def addLocalUdpClient(self, _id, address):
self.addSocketItem(_id, address, socktypes.UDP_CLIENT_LOCAL, config.UDP_CLIENT_ICON_LOCAL)
def addRemoteTcpClient(self, serverId, _id, address, port):
<|code_end|>
. Use current file imports:
from log import logger
from ui.AppIcons import *
from SockTreeItem import SockTreeItem
from net import socktypes
from PyQt4.QtGui import (
QTreeWidget,
QIcon )
import config
and context (classes, functions, or code) from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | parentItem = self.getSockItemById(serverId) |
Using the snippet: <|code_start|> if not parentItem:
logger.error("parent sock item is None")
return
item = SockTreeItem(sockType, address, _id, icon)
self.sockItemDict[_id] = item
parentItem.addChild(item)
self.setItemExpanded(parentItem, True)
self.setCurrentItem(item)
def addTcpServer(self, _id, address):
self.addSocketItem(_id, address, socktypes.TCP_SERVER, config.TCP_SERVER_ICON)
def addUdpServer(self, _id, address):
self.addSocketItem(_id, address, socktypes.UDP_SERVER, config.UDP_SERVER_ICON)
def addLocalTcpClient(self, _id, address):
self.addSocketItem(_id, address, socktypes.TCP_CLIENT_LOCAL, config.TCP_CLIENT_ICON_LOCAL)
def addLocalUdpClient(self, _id, address):
self.addSocketItem(_id, address, socktypes.UDP_CLIENT_LOCAL, config.UDP_CLIENT_ICON_LOCAL)
def addRemoteTcpClient(self, serverId, _id, address, port):
parentItem = self.getSockItemById(serverId)
if parentItem is None:
logger.error("parentItem is None")
return
item = SockTreeItem(socktypes.TCP_CLIENT_REMOTE, "%s:%d" % (address, port), _id, config.TCP_CLIENT_ICON_REMOTE)
self.sockItemDict[_id] = item
<|code_end|>
, determine the next line of code. You have imports:
from log import logger
from ui.AppIcons import *
from SockTreeItem import SockTreeItem
from net import socktypes
from PyQt4.QtGui import (
QTreeWidget,
QIcon )
import config
and context (class names, function names, or code) available:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | parentItem.addChild(item) |
Given snippet: <|code_start|>#-*- coding: utf-8 -*-
class CreateTcpServerDialog(CreateDialog):
def __init__(self, parent=None):
CreateDialog.__init__(self)
self.okSig = signals.SIG_CREATE_TCP_SERVER
self.ui = Ui_CreateTcpServerForm()
self.ui.setupUi(self)
self.initIpList()
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import utils
import signals
import config
import error
from CreateDialog import CreateDialog
from ui.Ui_CreateTcpServerForm import Ui_CreateTcpServerForm
from PyQt4 import QtGui
from PyQt4 import QtCore
and context:
# Path: ui/Ui_CreateTcpServerForm.py
# class Ui_CreateTcpServerForm(object):
# def setupUi(self, CreateTcpServerForm):
# CreateTcpServerForm.setObjectName(_fromUtf8("CreateTcpServerForm"))
# CreateTcpServerForm.resize(344, 147)
# self.gridLayout = QtGui.QGridLayout(CreateTcpServerForm)
# self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
# spacerItem = QtGui.QSpacerItem(20, 29, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
# self.gridLayout.addItem(spacerItem, 0, 0, 1, 1)
# self.horizontalLayout_3 = QtGui.QHBoxLayout()
# self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
# self.label_2 = QtGui.QLabel(CreateTcpServerForm)
# self.label_2.setMaximumSize(QtCore.QSize(54, 16777215))
# self.label_2.setObjectName(_fromUtf8("label_2"))
# self.horizontalLayout_3.addWidget(self.label_2)
# self.ipCmbBox = QtGui.QComboBox(CreateTcpServerForm)
# self.ipCmbBox.setObjectName(_fromUtf8("ipCmbBox"))
# self.horizontalLayout_3.addWidget(self.ipCmbBox)
# self.gridLayout.addLayout(self.horizontalLayout_3, 1, 0, 1, 1)
# self.horizontalLayout = QtGui.QHBoxLayout()
# self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
# self.label = QtGui.QLabel(CreateTcpServerForm)
# self.label.setMaximumSize(QtCore.QSize(54, 16777215))
# self.label.setObjectName(_fromUtf8("label"))
# self.horizontalLayout.addWidget(self.label)
# self.portCmbBox = QtGui.QComboBox(CreateTcpServerForm)
# self.portCmbBox.setInputMethodHints(QtCore.Qt.ImhDigitsOnly)
# self.portCmbBox.setEditable(True)
# self.portCmbBox.setObjectName(_fromUtf8("portCmbBox"))
# self.horizontalLayout.addWidget(self.portCmbBox)
# self.gridLayout.addLayout(self.horizontalLayout, 2, 0, 1, 1)
# spacerItem1 = QtGui.QSpacerItem(20, 29, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
# self.gridLayout.addItem(spacerItem1, 3, 0, 1, 1)
# self.horizontalLayout_2 = QtGui.QHBoxLayout()
# self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
# spacerItem2 = QtGui.QSpacerItem(68, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
# self.horizontalLayout_2.addItem(spacerItem2)
# self.okBtn = QtGui.QPushButton(CreateTcpServerForm)
# self.okBtn.setObjectName(_fromUtf8("okBtn"))
# self.horizontalLayout_2.addWidget(self.okBtn)
# self.cancelBtn = QtGui.QPushButton(CreateTcpServerForm)
# self.cancelBtn.setObjectName(_fromUtf8("cancelBtn"))
# self.horizontalLayout_2.addWidget(self.cancelBtn)
# self.gridLayout.addLayout(self.horizontalLayout_2, 4, 0, 1, 1)
#
# self.retranslateUi(CreateTcpServerForm)
# QtCore.QMetaObject.connectSlotsByName(CreateTcpServerForm)
#
# def retranslateUi(self, CreateTcpServerForm):
# CreateTcpServerForm.setWindowTitle(_translate("CreateTcpServerForm", "创建TCP服务器", None))
# self.label_2.setText(_translate("CreateTcpServerForm", "监听地址", None))
# self.label.setText(_translate("CreateTcpServerForm", "监听端口", None))
# self.okBtn.setText(_translate("CreateTcpServerForm", "确定", None))
# self.cancelBtn.setText(_translate("CreateTcpServerForm", "取消", None))
which might include code, classes, or functions. Output only the next line. | self.setModal(True) |
Continue the code snippet: <|code_start|> QWidget.__init__(self, parent)
self.setWindowFlags(Qt.FramelessWindowHint | Qt.Dialog)
self.ui = Ui_TipPupup()
self.ui.setupUi(self)
self.timer = QTimer()
self.timer.timeout.connect(self.__onTimeout)
self.queue = Queue(self.MAX_QUEUE_SIZE)
self.timerCounter = 0
self.maxOpacityCounter = 0
self.setWindowOpacity(0)
def __makeText(self, text, type=TEXT_TYPE_INFO, t=SHORT_TIME):
if self.timer.isActive():
if not self.queue.full():
self.queue.put_nowait((text, type, t))
return
iconPath = self.INFO_ICON_PATH
if type == self.TEXT_TYPE_WARNING:
iconPath = self.WARNING_ICON_PATH
elif type == self.TEXT_TYPE_ERROR:
iconPath = self.ERROR_ICON_PATH
self.ui.image_label.setPixmap(QPixmap(iconPath))
self.ui.text_label.setText(text)
self.timer.start(self.TIMER_INTERVAL)
<|code_end|>
. Use current file imports:
from ui.Ui_TipPupup import Ui_TipPupup
from ui.TipIcons import *
from Queue import Queue
from PyQt4.QtGui import QApplication, QWidget, QPixmap
from PyQt4.QtCore import QTimer, Qt
import sys
and context (classes, functions, or code) from other files:
# Path: ui/Ui_TipPupup.py
# class Ui_TipPupup(object):
# def setupUi(self, TipWidget):
# TipWidget.setObjectName(_fromUtf8("TipWidget"))
# TipWidget.resize(375, 82)
# TipWidget.setStyleSheet(_fromUtf8("background-color: rgb(89, 89, 89);"))
# self.image_label = QtGui.QLabel(TipWidget)
# self.image_label.setGeometry(QtCore.QRect(10, 10, 64, 64))
# self.image_label.setFrameShape(QtGui.QFrame.NoFrame)
# self.image_label.setText(_fromUtf8(""))
# self.image_label.setObjectName(_fromUtf8("image_label"))
# self.text_label = QtGui.QLabel(TipWidget)
# self.text_label.setGeometry(QtCore.QRect(80, 20, 281, 41))
# self.text_label.setStyleSheet(_fromUtf8("color: rgb(255, 255, 255);\n"
# "font: 11pt \"新宋体\";"))
# self.text_label.setObjectName(_fromUtf8("text_label"))
#
# self.retranslateUi(TipWidget)
# QtCore.QMetaObject.connectSlotsByName(TipWidget)
#
# def retranslateUi(self, TipWidget):
# TipWidget.setWindowTitle(QtGui.QApplication.translate("TipWidget", "Form", None, QtGui.QApplication.UnicodeUTF8))
# self.text_label.setText(QtGui.QApplication.translate("TipWidget", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
. Output only the next line. | self.show() |
Given snippet: <|code_start|>#-*- coding: utf-8 -*-
class SockClient():
RECV_SIZE = 262144
def __init__(self, parentId, sock, addr, sockType):
self.parentId = parentId
self._id = id(self)
self.sockType = sockType
self.conFlag = sockType == socktypes.TCP_CLIENT_REMOTE
self.onlyStopSocket = False
self.receiver = None
if sock:
self.sock = sock
else:
self.createSockByType(sockType)
self.sock.setblocking(0)
self.ip, self.port = addr[0:]
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from log import logger
from SigObject import sigObject
from net import socktypes
import signals
import socket
import select
import socktypes
import threading
and context:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
which might include code, classes, or functions. Output only the next line. | def createSockByType(self, sockType): |
Given the code snippet: <|code_start|> else:
return -1
def getSockType(self):
return self.sockType
def isConnected(self):
return self.conFlag
def connect(self):
try:
if self.sock is None:
self.createSockByType(self.sockType)
self.sock.settimeout(0.5)
self.sock.connect((self.ip, self.port))
self.sock.setblocking(0)
self.conFlag = True
return True
except Exception as e:
logger.error("connect exp: %s" % e.message)
return
def sendall(self, data):
if not isinstance(data, basestring):
return False
return self.sock.sendall(data) is None
<|code_end|>
, generate the next line using the imports in this file:
from log import logger
from SigObject import sigObject
from net import socktypes
import signals
import socket
import select
import socktypes
import threading
and context (functions, classes, or occasionally code) from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def getId(self): |
Given the code snippet: <|code_start|>#-*- coding: utf-8 -*-
class SockClient():
RECV_SIZE = 262144
def __init__(self, parentId, sock, addr, sockType):
self.parentId = parentId
self._id = id(self)
self.sockType = sockType
self.conFlag = sockType == socktypes.TCP_CLIENT_REMOTE
self.onlyStopSocket = False
self.receiver = None
if sock:
self.sock = sock
else:
self.createSockByType(sockType)
self.sock.setblocking(0)
self.ip, self.port = addr[0:]
def createSockByType(self, sockType):
if sockType == socktypes.UDP_CLIENT_LOCAL:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
elif sockType == socktypes.TCP_CLIENT_LOCAL:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
else:
logger.error("***** ERROR SOCKTYPE *****")
def getAddress(self):
<|code_end|>
, generate the next line using the imports in this file:
from log import logger
from SigObject import sigObject
from net import socktypes
import signals
import socket
import select
import socktypes
import threading
and context (functions, classes, or occasionally code) from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | return "%s:%d" % (self.ip, self.port) |
Predict the next line for this snippet: <|code_start|>#-*- coding: utf-8 -*-
class UdpServerManager(ServerManager):
def __init__(self):
ServerManager.__init__(self)
def create(self, ip, port):
_id, address = self.create_(ip, port, socktypes.UDP_SERVER)
return self.serverDict.get(_id), _id, address
<|code_end|>
with the help of current file imports:
from ServerManager import ServerManager
from net import socktypes
and context from other files:
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
, which may contain function names, class names, or code. Output only the next line. | udpServerManager = UdpServerManager() |
Given snippet: <|code_start|> SNDBUF_SIZE = 262144
BACKLOG = 10
def __init__(self, port, ip="0.0.0.0"):
threading.Thread.__init__(self)
self.ip, self.port = ip, port
self.tcpClients = {}
self.stopflag = False
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, self.RCVBUF_SIZE)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, self.SNDBUF_SIZE)
self.sock.setsockopt(socket.SOL_SOCKET, socket.TCP_NODELAY, 1)
#self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR , 1) # do not turn on SO_REUSEADDR
self.sock.settimeout(0.1)
self._id = id(self.sock)
def getId(self):
logger.debug("id: %d" % self._id)
return self._id
def getAddress(self):
return "%s:%d" % (self.ip, self.port)
def start_(self):
try:
self.sock.bind((self.ip, self.port))
self.sock.listen(self.BACKLOG)
logger.debug("TCP server bind on %s:%d success" % (self.ip, self.port))
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from TcpClient import TcpClient
from SigObject import sigObject
from log import logger
import signals
import socket
import socktypes
import threading
and context:
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
which might include code, classes, or functions. Output only the next line. | except socket.error: |
Based on the snippet: <|code_start|>#-*- coding: utf-8 -*-
class TcpServer(threading.Thread):
RCVBUF_SIZE = 262144
SNDBUF_SIZE = 262144
BACKLOG = 10
def __init__(self, port, ip="0.0.0.0"):
threading.Thread.__init__(self)
self.ip, self.port = ip, port
<|code_end|>
, predict the immediate next line with the help of imports:
from TcpClient import TcpClient
from SigObject import sigObject
from log import logger
import signals
import socket
import socktypes
import threading
and context (classes, functions, sometimes code) from other files:
# Path: SigObject.py
# class SigObject(QObject):
# def __init__(self):
#
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
. Output only the next line. | self.tcpClients = {} |
Using the snippet: <|code_start|> sockCls = self.sockClientClsDict.get(sockType)
if not sockCls:
return None, -1, ""
sockClient = sockCls(0, None, (ip, port), sockType)
if connect:
if not sockClient.connect():
logger.error("fail to connect to server")
return None, -1, ""
sockClient.start()
logger.debug("sock client connect success")
_id = sockClient.getId()
self.clientDict[_id] = sockClient
logger.info("sock client create ok")
return sockClient, _id, sockClient.getAddress()
def removeClient(self, _id):
logger.debug("**** remove client: %d" % _id)
sockClient = self.clientDict.get(_id)
if not sockClient:
logger.error("sockClient is None")
return
sockClient.stop()
del self.clientDict[_id]
logger.debug("remove Client ok")
<|code_end|>
, determine the next line of code. You have imports:
from log import logger
from UdpClient import UdpClient
from TcpClient import TcpClient
from net import socktypes
and context (class names, function names, or code) available:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def removeAllClient(self): |
Predict the next line after this snippet: <|code_start|>#-*- coding: utf-8 -*-
class SockClientManager(object):
def __init__(self):
self.clientDict = {}
self.sockClientClsDict = { socktypes.UDP_CLIENT_LOCAL: UdpClient,
socktypes.TCP_CLIENT_LOCAL: TcpClient}
def create_(self, ip, port, sockType, connect=True):
sockCls = self.sockClientClsDict.get(sockType)
if not sockCls:
return None, -1, ""
sockClient = sockCls(0, None, (ip, port), sockType)
if connect:
if not sockClient.connect():
logger.error("fail to connect to server")
return None, -1, ""
sockClient.start()
logger.debug("sock client connect success")
_id = sockClient.getId()
self.clientDict[_id] = sockClient
logger.info("sock client create ok")
return sockClient, _id, sockClient.getAddress()
<|code_end|>
using the current file's imports:
from log import logger
from UdpClient import UdpClient
from TcpClient import TcpClient
from net import socktypes
and any relevant context from other files:
# Path: log.py
# class __NullLogger:
# def debug(self, msg=""): pass
# def warning(self, msg=""): pass
# def info(self, msg=""): pass
# def error(self, msg=""): pass
# def fatal(self, msg=""): pass
#
# Path: net/socktypes.py
# TCP_SERVER_BASE_TYPE = 0x00
# TCP_CLIENT_BASE_TYPE = 0x10
# UDP_SERVER_BASE_TYPE = 0x20
# UDP_CLIENT_BASE_TYPE = 0x30
# TCP_SERVER = TCP_SERVER_BASE_TYPE | 0x01
# TCP_CLIENT_REMOTE = TCP_SERVER_BASE_TYPE | 0x02
# TCP_CLIENT_LOCAL = TCP_CLIENT_BASE_TYPE | 0x10
# UDP_SERVER = UDP_SERVER_BASE_TYPE | 0x00 # it's just like a sever
# UDP_CLIENT_REMOTE = UDP_SERVER_BASE_TYPE | 0x01
# UDP_CLIENT_LOCAL = UDP_CLIENT_BASE_TYPE | 0x01
. Output only the next line. | def removeClient(self, _id): |
Continue the code snippet: <|code_start|>
MEMSQL_LOADER_DB = 'memsql_loader.db'
def get_loader_db_path():
return os.path.join(paths.get_data_dir(), MEMSQL_LOADER_DB)
# IMPORTANT NOTE: This class cannot be shared across forked processes unless
# you use fork_wrapper.
class LoaderStorage(APSWStorage):
_instance = None
_initialized = False
_instance_lock = multiprocessing.RLock()
# We use LoaderStorage as a singleton.
def __new__(cls, *args, **kwargs):
with cls._instance_lock:
if cls._instance is None:
cls._instance = super(LoaderStorage, cls).__new__(
cls, *args, **kwargs)
cls._initialized = False
return cls._instance
@classmethod
def drop_database(cls):
with cls._instance_lock:
if os.path.isfile(get_loader_db_path()):
os.remove(get_loader_db_path())
<|code_end|>
. Use current file imports:
import contextlib
import gc
import multiprocessing
import os
from memsql_loader.util.apsw_storage import APSWStorage
from memsql_loader.util import paths
and context (classes, functions, or code) from other files:
# Path: memsql_loader/util/apsw_storage.py
# class APSWStorage(object):
# """ SQLite backed database
#
# Usage ::
#
# class BarStorage(APSWStorage):
# def setup(self):
# with self.transaction() as cursor:
# cursor.execute("create table bar (fuzz int)")
#
# storage = BarStorage('bar.db')
#
# # transactions can be nested
# # note that cursor() returns a different connection from the transaction
# with storage.transaction() as cursor:
# cursor.execute("insert into bar values (1)")
#
# with storage.transaction() as cursor2:
# cursor2.execute("insert into bar values (2)")
#
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 0
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 0
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 2
#
# # transactions can also be verified for changes
# # note that the connection for the transaction and for checking changes is shared
# with storage.transaction() as cursor:
# cursor.execute("insert into bar values (1) where 0=1")
# assert storage.transaction_changes() == 0, "no changes made by transaction"
#
# """
# _db = None
# _db_t = None
# _write_lock = None
# _read_lock = None
#
# def __init__(self, path):
# self._write_lock = multiprocessing.RLock()
# self.path = path
# self.setup_connections()
#
# def setup_connections(self):
# """ Setup a sqlite3 database at the provided path. """
# # _db_t is for transactions, _db is for all other cursors
# self._db = apsw.Connection(self.path)
# self._db_t = apsw.Connection(self.path)
# self._db.setbusytimeout(60000)
# self._db_t.setbusytimeout(60000)
#
# self._read_lock = threading.RLock()
#
# def pragma(cursor, name, value, check_val):
# cursor.execute("pragma %s=%s" % (name, value))
# server_val = cursor.execute("pragma %s" % name).fetchone()[0]
# if not server_val == check_val:
# raise APSWStorageInitFailure("Failed to set %s to %s (%s != %s)" % (name, value, server_val, check_val))
#
# with self._write_lock:
# for db in [self._db, self._db_t]:
# cursor = db.cursor()
# pragma(cursor, "journal_mode", "WAL", "wal")
# pragma(cursor, "synchronous", "NORMAL", 1)
# pragma(cursor, "foreign_keys", "ON", 1)
#
# @contextlib.contextmanager
# def transaction(self):
# """ Take the write lock, and return a cursor to the database.
#
# Transactions can be nested.
# """
#
# with self._write_lock:
# with self._db_t:
# yield self._db_t.cursor()
# try:
# self._db_t.wal_checkpoint()
# except (apsw.BusyError, apsw.LockedError):
# pass
# with self._read_lock:
# try:
# self._db.wal_checkpoint()
# except (apsw.BusyError, apsw.LockedError):
# pass
#
# @contextlib.contextmanager
# def cursor(self):
# with self._read_lock:
# yield self._db.cursor()
#
# def transaction_changes(self):
# return self._db_t.changes()
#
# def close_connections(self):
# self._db.close(True)
# self._db_t.close(True)
# self._db = None
# self._db_t = None
#
# Path: memsql_loader/util/paths.py
# MEMSQL_LOADER_PATH_ENV = "MEMSQL_LOADER_DATA_DIRECTORY"
# def get_data_dir():
. Output only the next line. | if os.path.isfile(get_loader_db_path() + '-shm'): |
Next line prediction: <|code_start|> cls, *args, **kwargs)
cls._initialized = False
return cls._instance
@classmethod
def drop_database(cls):
with cls._instance_lock:
if os.path.isfile(get_loader_db_path()):
os.remove(get_loader_db_path())
if os.path.isfile(get_loader_db_path() + '-shm'):
os.remove(get_loader_db_path() + '-shm')
if os.path.isfile(get_loader_db_path() + '-wal'):
os.remove(get_loader_db_path() + '-wal')
cls._instance = None
@classmethod
@contextlib.contextmanager
def fork_wrapper(cls):
# This context manager should be used around any code that forks new
# processes that will use a LoaderStorage object (e.g. Worker objects).
# This ensures that we don't share SQLite connections across forked
# processes.
with cls._instance_lock:
if cls._instance is not None:
cls._instance.close_connections()
# We garbage collect here to clean up any SQLite objects we
# may have missed; this is important because any surviving
# objects post-fork will mess up SQLite connections in the
# child process. We use generation=2 to collect as many
# objects as possible.
<|code_end|>
. Use current file imports:
(import contextlib
import gc
import multiprocessing
import os
from memsql_loader.util.apsw_storage import APSWStorage
from memsql_loader.util import paths)
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/util/apsw_storage.py
# class APSWStorage(object):
# """ SQLite backed database
#
# Usage ::
#
# class BarStorage(APSWStorage):
# def setup(self):
# with self.transaction() as cursor:
# cursor.execute("create table bar (fuzz int)")
#
# storage = BarStorage('bar.db')
#
# # transactions can be nested
# # note that cursor() returns a different connection from the transaction
# with storage.transaction() as cursor:
# cursor.execute("insert into bar values (1)")
#
# with storage.transaction() as cursor2:
# cursor2.execute("insert into bar values (2)")
#
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 0
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 0
# assert apsw_helpers.get(storage.cursor(), "select count(*) c from foo").c == 2
#
# # transactions can also be verified for changes
# # note that the connection for the transaction and for checking changes is shared
# with storage.transaction() as cursor:
# cursor.execute("insert into bar values (1) where 0=1")
# assert storage.transaction_changes() == 0, "no changes made by transaction"
#
# """
# _db = None
# _db_t = None
# _write_lock = None
# _read_lock = None
#
# def __init__(self, path):
# self._write_lock = multiprocessing.RLock()
# self.path = path
# self.setup_connections()
#
# def setup_connections(self):
# """ Setup a sqlite3 database at the provided path. """
# # _db_t is for transactions, _db is for all other cursors
# self._db = apsw.Connection(self.path)
# self._db_t = apsw.Connection(self.path)
# self._db.setbusytimeout(60000)
# self._db_t.setbusytimeout(60000)
#
# self._read_lock = threading.RLock()
#
# def pragma(cursor, name, value, check_val):
# cursor.execute("pragma %s=%s" % (name, value))
# server_val = cursor.execute("pragma %s" % name).fetchone()[0]
# if not server_val == check_val:
# raise APSWStorageInitFailure("Failed to set %s to %s (%s != %s)" % (name, value, server_val, check_val))
#
# with self._write_lock:
# for db in [self._db, self._db_t]:
# cursor = db.cursor()
# pragma(cursor, "journal_mode", "WAL", "wal")
# pragma(cursor, "synchronous", "NORMAL", 1)
# pragma(cursor, "foreign_keys", "ON", 1)
#
# @contextlib.contextmanager
# def transaction(self):
# """ Take the write lock, and return a cursor to the database.
#
# Transactions can be nested.
# """
#
# with self._write_lock:
# with self._db_t:
# yield self._db_t.cursor()
# try:
# self._db_t.wal_checkpoint()
# except (apsw.BusyError, apsw.LockedError):
# pass
# with self._read_lock:
# try:
# self._db.wal_checkpoint()
# except (apsw.BusyError, apsw.LockedError):
# pass
#
# @contextlib.contextmanager
# def cursor(self):
# with self._read_lock:
# yield self._db.cursor()
#
# def transaction_changes(self):
# return self._db_t.changes()
#
# def close_connections(self):
# self._db.close(True)
# self._db_t.close(True)
# self._db = None
# self._db_t = None
#
# Path: memsql_loader/util/paths.py
# MEMSQL_LOADER_PATH_ENV = "MEMSQL_LOADER_DATA_DIRECTORY"
# def get_data_dir():
. Output only the next line. | gc.collect(2) |
Using the snippet: <|code_start|> if self._running_steps() != 0:
raise StepRunning()
if self.finished is not None:
raise AlreadyFinished()
with self.storage.cursor() as cursor:
affected_row = apsw_helpers.get(cursor, '''
SELECT * from %s
WHERE
id = :task_id
AND execution_id = :execution_id
AND last_contact > datetime(:now, 'unixepoch', '-%s second')
''' % (self._queue.table_name, self._queue.execution_ttl),
now=unix_timestamp(datetime.utcnow()),
task_id=self.task_id,
execution_id=self.execution_id)
if affected_row is None:
raise TaskDoesNotExist()
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
UPDATE %s
SET
last_contact=NULL,
update_count=update_count + 1,
started=NULL,
steps=NULL,
execution_id=NULL,
finished=NULL,
<|code_end|>
, determine the next line of code. You have imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (class names, function names, or code) available:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | result=NULL |
Given the code snippet: <|code_start|> self.data = None
self.result = None
# NOTE: These fields are specific to the memsql-loader use case;
# they are not necessary for the queue functionality.
self.job_id = None
self.file_id = None
self.md5 = None
self.bytes_total = None
self.bytes_downloaded = None
self.download_rate = None
self.steps = None
self._refresh()
###############################
# Public Interface
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self.storage.cursor() as cursor:
row = apsw_helpers.get(cursor, '''
SELECT (last_contact > datetime(:now, 'unixepoch', '-%s second')) AS valid
FROM %s
WHERE
id = :task_id
<|code_end|>
, generate the next line using the imports in this file:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (functions, classes, or occasionally code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | AND execution_id = :execution_id |
Using the snippet: <|code_start|> self.steps = self._load_steps(json.loads(row.steps))
self.started = row.started
self.finished = row.finished
def _load_steps(self, raw_steps):
""" load steps -> basically load all the datetime isoformats into datetimes """
for step in raw_steps:
if 'start' in step:
step['start'] = parser.parse(step['start'])
if 'stop' in step:
step['stop'] = parser.parse(step['stop'])
return raw_steps
def _save(self, finished=None, steps=None, result=None, data=None):
finished = finished if finished is not None else self.finished
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
UPDATE %s
SET
last_contact=datetime(:now, 'unixepoch'),
update_count=update_count + 1,
steps=:steps,
finished=datetime(:finished, 'unixepoch'),
result=:result,
bytes_downloaded=:bytes_downloaded,
download_rate=:download_rate,
data=:data
WHERE
id = :task_id
AND execution_id = :execution_id
<|code_end|>
, determine the next line of code. You have imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (class names, function names, or code) available:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | AND last_contact > datetime(:now, 'unixepoch', '-%s second') |
Predict the next line after this snippet: <|code_start|> self.job_id = row.job_id
self.file_id = row.file_id
self.md5 = row.md5
self.bytes_total = row.bytes_total
self.bytes_downloaded = row.bytes_downloaded
self.download_rate = row.download_rate
self.steps = self._load_steps(json.loads(row.steps))
self.started = row.started
self.finished = row.finished
def _load_steps(self, raw_steps):
""" load steps -> basically load all the datetime isoformats into datetimes """
for step in raw_steps:
if 'start' in step:
step['start'] = parser.parse(step['start'])
if 'stop' in step:
step['stop'] = parser.parse(step['stop'])
return raw_steps
def _save(self, finished=None, steps=None, result=None, data=None):
finished = finished if finished is not None else self.finished
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
UPDATE %s
SET
last_contact=datetime(:now, 'unixepoch'),
update_count=update_count + 1,
steps=:steps,
finished=datetime(:finished, 'unixepoch'),
result=:result,
<|code_end|>
using the current file's imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and any relevant context from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | bytes_downloaded=:bytes_downloaded, |
Based on the snippet: <|code_start|>
def valid(self):
""" Check to see if we are still active. """
if self.finished is not None:
return False
with self.storage.cursor() as cursor:
row = apsw_helpers.get(cursor, '''
SELECT (last_contact > datetime(:now, 'unixepoch', '-%s second')) AS valid
FROM %s
WHERE
id = :task_id
AND execution_id = :execution_id
''' % (self._queue.execution_ttl, self._queue.table_name),
now=unix_timestamp(datetime.utcnow()),
task_id=self.task_id,
execution_id=self.execution_id)
return bool(row is not None and row.valid)
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self.storage.cursor() as cursor:
affected_row = apsw_helpers.get(cursor, '''
SELECT * from %s
WHERE
id = :task_id
<|code_end|>
, predict the immediate next line with the help of imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | AND execution_id = :execution_id |
Based on the snippet: <|code_start|> return bool(row is not None and row.valid)
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self.storage.cursor() as cursor:
affected_row = apsw_helpers.get(cursor, '''
SELECT * from %s
WHERE
id = :task_id
AND execution_id = :execution_id
AND last_contact > datetime(:now, 'unixepoch', '-%s second')
''' % (self._queue.table_name, self._queue.execution_ttl),
now=unix_timestamp(datetime.utcnow()),
task_id=self.task_id,
execution_id=self.execution_id)
if not affected_row:
raise TaskDoesNotExist()
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
UPDATE %s
SET
last_contact=datetime(:now, 'unixepoch'),
update_count=update_count + 1
WHERE
id = :task_id
<|code_end|>
, predict the immediate next line with the help of imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | AND execution_id = :execution_id |
Predict the next line for this snippet: <|code_start|> execution_id=self.execution_id)
return bool(row is not None and row.valid)
def ping(self):
""" Notify the queue that this task is still active. """
if self.finished is not None:
raise AlreadyFinished()
with self.storage.cursor() as cursor:
affected_row = apsw_helpers.get(cursor, '''
SELECT * from %s
WHERE
id = :task_id
AND execution_id = :execution_id
AND last_contact > datetime(:now, 'unixepoch', '-%s second')
''' % (self._queue.table_name, self._queue.execution_ttl),
now=unix_timestamp(datetime.utcnow()),
task_id=self.task_id,
execution_id=self.execution_id)
if not affected_row:
raise TaskDoesNotExist()
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
UPDATE %s
SET
last_contact=datetime(:now, 'unixepoch'),
update_count=update_count + 1
<|code_end|>
with the help of current file imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
, which may contain function names, class names, or code. Output only the next line. | WHERE |
Given snippet: <|code_start|> if self._running_steps() != 0:
raise StepRunning()
if self.finished is not None:
raise AlreadyFinished()
self._save(finished=datetime.utcnow(), result=result)
def requeue(self):
if self._running_steps() != 0:
raise StepRunning()
if self.finished is not None:
raise AlreadyFinished()
with self.storage.cursor() as cursor:
affected_row = apsw_helpers.get(cursor, '''
SELECT * from %s
WHERE
id = :task_id
AND execution_id = :execution_id
AND last_contact > datetime(:now, 'unixepoch', '-%s second')
''' % (self._queue.table_name, self._queue.execution_ttl),
now=unix_timestamp(datetime.utcnow()),
task_id=self.task_id,
execution_id=self.execution_id)
if affected_row is None:
raise TaskDoesNotExist()
with self.storage.transaction() as cursor:
apsw_helpers.query(cursor, '''
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import copy
from datetime import datetime
from dateutil import parser
from contextlib import contextmanager
from memsql_loader.util import apsw_helpers, super_json as json
from memsql_loader.util.apsw_sql_step_queue.errors import TaskDoesNotExist, StepAlreadyStarted, StepNotStarted, StepAlreadyFinished, StepRunning, AlreadyFinished
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/errors.py
# class TaskDoesNotExist(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyStarted(APSWSQLStepQueueException):
# pass
#
# class StepNotStarted(APSWSQLStepQueueException):
# pass
#
# class StepAlreadyFinished(APSWSQLStepQueueException):
# pass
#
# class StepRunning(APSWSQLStepQueueException):
# pass
#
# class AlreadyFinished(APSWSQLStepQueueException):
# pass
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
which might include code, classes, or functions. Output only the next line. | UPDATE %s |
Given the following code snippet before the placeholder: <|code_start|>
MODELS = { 'jobs': jobs.Jobs, 'tasks': tasks.Tasks }
def check_bootstrapped():
loader_storage = storage.LoaderStorage()
with loader_storage.cursor() as cursor:
rows = apsw_helpers.query(
cursor, 'SELECT name FROM sqlite_master WHERE type = "table"')
tables = [row.name for row in rows]
return all([model in tables for model in MODELS.keys()])
<|code_end|>
, predict the next line using imports from the current file:
from memsql_loader.loader_db import jobs, tasks
from memsql_loader.loader_db import storage
from memsql_loader.util import apsw_helpers, log
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/loader_db/jobs.py
# PRIMARY_TABLE = apsw_sql_utility.TableDefinition('jobs', """\
# CREATE TABLE IF NOT EXISTS jobs (
# id BINARY(32) PRIMARY KEY,
# created DATETIME NOT NULL,
# spec TEXT NOT NULL
# )""", index_columns=('created',))
# def hash_64_bit(value):
# def __init__(self):
# def save(self, job):
# def delete(self, job):
# def get(self, job_id):
# def all(self):
# def query_target(self, host, port, database, table):
# def __init__(self, spec, job_id=None):
# def json_spec(self):
# def get_file_id(self, key):
# def has_file_id(self):
# def get_files(self, s3_conn=None):
# class Jobs(apsw_sql_utility.APSWSQLUtility):
# class Job(object):
#
# Path: memsql_loader/loader_db/tasks.py
# class TaskHandler(apsw_sql_step_queue.TaskHandler):
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self, *args, **kwargs):
# def protect(self):
# def error(self, message):
# def requeue(self):
# def __init__(self):
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# def get_tasks_in_state(self, state, extra_predicate=None):
#
# Path: memsql_loader/loader_db/storage.py
# MEMSQL_LOADER_DB = 'memsql_loader.db'
# def get_loader_db_path():
# def __new__(cls, *args, **kwargs):
# def drop_database(cls):
# def fork_wrapper(cls):
# def __init__(self):
# class LoaderStorage(APSWStorage):
#
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | def bootstrap(force=False): |
Continue the code snippet: <|code_start|>
MODELS = { 'jobs': jobs.Jobs, 'tasks': tasks.Tasks }
def check_bootstrapped():
loader_storage = storage.LoaderStorage()
with loader_storage.cursor() as cursor:
rows = apsw_helpers.query(
cursor, 'SELECT name FROM sqlite_master WHERE type = "table"')
tables = [row.name for row in rows]
return all([model in tables for model in MODELS.keys()])
def bootstrap(force=False):
logger = log.get_logger('Bootstrap') # noqa
def write_log(title, name, msg):
log_title_width = 28
title = ("%s [%s]: " % (title, name)).rjust(log_title_width, ' ')
logger.info(title + msg)
write_log('Database', storage.MEMSQL_LOADER_DB, 'Checking...')
if force:
write_log('Database', storage.MEMSQL_LOADER_DB, 'Dropping...')
storage.LoaderStorage.drop_database()
write_log('Database', storage.MEMSQL_LOADER_DB, 'Ready.')
<|code_end|>
. Use current file imports:
from memsql_loader.loader_db import jobs, tasks
from memsql_loader.loader_db import storage
from memsql_loader.util import apsw_helpers, log
and context (classes, functions, or code) from other files:
# Path: memsql_loader/loader_db/jobs.py
# PRIMARY_TABLE = apsw_sql_utility.TableDefinition('jobs', """\
# CREATE TABLE IF NOT EXISTS jobs (
# id BINARY(32) PRIMARY KEY,
# created DATETIME NOT NULL,
# spec TEXT NOT NULL
# )""", index_columns=('created',))
# def hash_64_bit(value):
# def __init__(self):
# def save(self, job):
# def delete(self, job):
# def get(self, job_id):
# def all(self):
# def query_target(self, host, port, database, table):
# def __init__(self, spec, job_id=None):
# def json_spec(self):
# def get_file_id(self, key):
# def has_file_id(self):
# def get_files(self, s3_conn=None):
# class Jobs(apsw_sql_utility.APSWSQLUtility):
# class Job(object):
#
# Path: memsql_loader/loader_db/tasks.py
# class TaskHandler(apsw_sql_step_queue.TaskHandler):
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self, *args, **kwargs):
# def protect(self):
# def error(self, message):
# def requeue(self):
# def __init__(self):
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# def get_tasks_in_state(self, state, extra_predicate=None):
#
# Path: memsql_loader/loader_db/storage.py
# MEMSQL_LOADER_DB = 'memsql_loader.db'
# def get_loader_db_path():
# def __new__(cls, *args, **kwargs):
# def drop_database(cls):
# def fork_wrapper(cls):
# def __init__(self):
# class LoaderStorage(APSWStorage):
#
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | for Model in MODELS.values(): |
Here is a snippet: <|code_start|>
MODELS = { 'jobs': jobs.Jobs, 'tasks': tasks.Tasks }
def check_bootstrapped():
loader_storage = storage.LoaderStorage()
with loader_storage.cursor() as cursor:
rows = apsw_helpers.query(
cursor, 'SELECT name FROM sqlite_master WHERE type = "table"')
tables = [row.name for row in rows]
return all([model in tables for model in MODELS.keys()])
def bootstrap(force=False):
logger = log.get_logger('Bootstrap') # noqa
def write_log(title, name, msg):
log_title_width = 28
title = ("%s [%s]: " % (title, name)).rjust(log_title_width, ' ')
logger.info(title + msg)
write_log('Database', storage.MEMSQL_LOADER_DB, 'Checking...')
if force:
write_log('Database', storage.MEMSQL_LOADER_DB, 'Dropping...')
storage.LoaderStorage.drop_database()
write_log('Database', storage.MEMSQL_LOADER_DB, 'Ready.')
for Model in MODELS.values():
instance = Model()
if not instance.ready():
write_log('Table', Model.__name__, 'Bootstrapping...')
instance.setup()
<|code_end|>
. Write the next line using the current file imports:
from memsql_loader.loader_db import jobs, tasks
from memsql_loader.loader_db import storage
from memsql_loader.util import apsw_helpers, log
and context from other files:
# Path: memsql_loader/loader_db/jobs.py
# PRIMARY_TABLE = apsw_sql_utility.TableDefinition('jobs', """\
# CREATE TABLE IF NOT EXISTS jobs (
# id BINARY(32) PRIMARY KEY,
# created DATETIME NOT NULL,
# spec TEXT NOT NULL
# )""", index_columns=('created',))
# def hash_64_bit(value):
# def __init__(self):
# def save(self, job):
# def delete(self, job):
# def get(self, job_id):
# def all(self):
# def query_target(self, host, port, database, table):
# def __init__(self, spec, job_id=None):
# def json_spec(self):
# def get_file_id(self, key):
# def has_file_id(self):
# def get_files(self, s3_conn=None):
# class Jobs(apsw_sql_utility.APSWSQLUtility):
# class Job(object):
#
# Path: memsql_loader/loader_db/tasks.py
# class TaskHandler(apsw_sql_step_queue.TaskHandler):
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self, *args, **kwargs):
# def protect(self):
# def error(self, message):
# def requeue(self):
# def __init__(self):
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# def get_tasks_in_state(self, state, extra_predicate=None):
#
# Path: memsql_loader/loader_db/storage.py
# MEMSQL_LOADER_DB = 'memsql_loader.db'
# def get_loader_db_path():
# def __new__(cls, *args, **kwargs):
# def drop_database(cls):
# def fork_wrapper(cls):
# def __init__(self):
# class LoaderStorage(APSWStorage):
#
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
, which may include functions, classes, or code. Output only the next line. | write_log('Table', Model.__name__, 'Ready.') |
Based on the snippet: <|code_start|>
MODELS = { 'jobs': jobs.Jobs, 'tasks': tasks.Tasks }
def check_bootstrapped():
loader_storage = storage.LoaderStorage()
with loader_storage.cursor() as cursor:
rows = apsw_helpers.query(
cursor, 'SELECT name FROM sqlite_master WHERE type = "table"')
tables = [row.name for row in rows]
return all([model in tables for model in MODELS.keys()])
def bootstrap(force=False):
logger = log.get_logger('Bootstrap') # noqa
def write_log(title, name, msg):
log_title_width = 28
title = ("%s [%s]: " % (title, name)).rjust(log_title_width, ' ')
logger.info(title + msg)
write_log('Database', storage.MEMSQL_LOADER_DB, 'Checking...')
if force:
write_log('Database', storage.MEMSQL_LOADER_DB, 'Dropping...')
storage.LoaderStorage.drop_database()
write_log('Database', storage.MEMSQL_LOADER_DB, 'Ready.')
for Model in MODELS.values():
instance = Model()
<|code_end|>
, predict the immediate next line with the help of imports:
from memsql_loader.loader_db import jobs, tasks
from memsql_loader.loader_db import storage
from memsql_loader.util import apsw_helpers, log
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/loader_db/jobs.py
# PRIMARY_TABLE = apsw_sql_utility.TableDefinition('jobs', """\
# CREATE TABLE IF NOT EXISTS jobs (
# id BINARY(32) PRIMARY KEY,
# created DATETIME NOT NULL,
# spec TEXT NOT NULL
# )""", index_columns=('created',))
# def hash_64_bit(value):
# def __init__(self):
# def save(self, job):
# def delete(self, job):
# def get(self, job_id):
# def all(self):
# def query_target(self, host, port, database, table):
# def __init__(self, spec, job_id=None):
# def json_spec(self):
# def get_file_id(self, key):
# def has_file_id(self):
# def get_files(self, s3_conn=None):
# class Jobs(apsw_sql_utility.APSWSQLUtility):
# class Job(object):
#
# Path: memsql_loader/loader_db/tasks.py
# class TaskHandler(apsw_sql_step_queue.TaskHandler):
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self, *args, **kwargs):
# def protect(self):
# def error(self, message):
# def requeue(self):
# def __init__(self):
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# def get_tasks_in_state(self, state, extra_predicate=None):
#
# Path: memsql_loader/loader_db/storage.py
# MEMSQL_LOADER_DB = 'memsql_loader.db'
# def get_loader_db_path():
# def __new__(cls, *args, **kwargs):
# def drop_database(cls):
# def fork_wrapper(cls):
# def __init__(self):
# class LoaderStorage(APSWStorage):
#
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | if not instance.ready(): |
Next line prediction: <|code_start|>
MODELS = { 'jobs': jobs.Jobs, 'tasks': tasks.Tasks }
def check_bootstrapped():
loader_storage = storage.LoaderStorage()
with loader_storage.cursor() as cursor:
rows = apsw_helpers.query(
cursor, 'SELECT name FROM sqlite_master WHERE type = "table"')
tables = [row.name for row in rows]
return all([model in tables for model in MODELS.keys()])
<|code_end|>
. Use current file imports:
(from memsql_loader.loader_db import jobs, tasks
from memsql_loader.loader_db import storage
from memsql_loader.util import apsw_helpers, log)
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/loader_db/jobs.py
# PRIMARY_TABLE = apsw_sql_utility.TableDefinition('jobs', """\
# CREATE TABLE IF NOT EXISTS jobs (
# id BINARY(32) PRIMARY KEY,
# created DATETIME NOT NULL,
# spec TEXT NOT NULL
# )""", index_columns=('created',))
# def hash_64_bit(value):
# def __init__(self):
# def save(self, job):
# def delete(self, job):
# def get(self, job_id):
# def all(self):
# def query_target(self, host, port, database, table):
# def __init__(self, spec, job_id=None):
# def json_spec(self):
# def get_file_id(self, key):
# def has_file_id(self):
# def get_files(self, s3_conn=None):
# class Jobs(apsw_sql_utility.APSWSQLUtility):
# class Job(object):
#
# Path: memsql_loader/loader_db/tasks.py
# class TaskHandler(apsw_sql_step_queue.TaskHandler):
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self, *args, **kwargs):
# def protect(self):
# def error(self, message):
# def requeue(self):
# def __init__(self):
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# def get_tasks_in_state(self, state, extra_predicate=None):
#
# Path: memsql_loader/loader_db/storage.py
# MEMSQL_LOADER_DB = 'memsql_loader.db'
# def get_loader_db_path():
# def __new__(cls, *args, **kwargs):
# def drop_database(cls):
# def fork_wrapper(cls):
# def __init__(self):
# class LoaderStorage(APSWStorage):
#
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | def bootstrap(force=False): |
Given the following code snippet that precedes the line to be predicted: <|code_start|> elif columns is None and len(data) > 0:
self.data = data
self.columns = data[0].keys()
else:
self.data = []
self.columns = []
self.align = align
self.sort_by = sort_by
self.reverse_sort = sort_dir == SortDirection.DESC
self.tablefmt = format
def format(self):
if self.tablefmt == TableFormat.JSON:
# TODO(cary) Patch clark.super_enum to support JSON serialization
printable_data = [
{ k: str(v) if isinstance(v, SuperEnum.Element) else v for k, v in row.iteritems() }
for row in self.data
]
return json.dumps(printable_data, sort_keys=True, indent=4 * ' ').encode('utf-8')
else:
ptable = PrettyTable(self.columns)
for k, v in self.align.iteritems():
ptable.align[k] = v
for row in self.data:
ptable.add_row([ row[col] for col in self.columns ])
if self.tablefmt == TableFormat.TABLE:
return ptable.get_string(sortby=self.sort_by, reversesort=self.reverse_sort).encode('utf-8')
<|code_end|>
, predict the next line using imports from the current file:
from prettytable import PrettyTable
from clark.super_enum import SuperEnum
from memsql_loader.api.shared import SortDirection
from memsql_loader.util import super_json as json
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/api/shared.py
# class SortDirection(SuperEnum):
# DESC = SuperEnum.E
# ASC = SuperEnum.E
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
. Output only the next line. | elif self.tablefmt == TableFormat.HTML: |
Here is a snippet: <|code_start|>
class Tasks(Api):
SORTABLE_COLUMNS = ['id', 'key_name', 'created', 'started', 'finished', 'state', 'error_msg']
validate = V.Schema({
V.Required('job_id'): basestring,
'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
<|code_end|>
. Write the next line using the current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
, which may include functions, classes, or code. Output only the next line. | SELECT COUNT(*) AS count |
Given the code snippet: <|code_start|> 'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
SELECT COUNT(*) AS count
FROM jobs
WHERE %(job_id_predicate)s
''' % generated_sql, **query_params).count
if job_count == 0:
raise exceptions.ApiException('No job found with id `%s`' % params['job_id'])
elif job_count > 1:
raise exceptions.ApiException('More than one job matches id `%s`, try using a more specific prefix' % params['job_id'])
rows = self._db_query('''
SELECT
tasks.*,
%(state_projection)s AS state
FROM tasks INNER JOIN jobs ON jobs.id = tasks.job_id
WHERE
%(job_id_predicate)s
%(state_predicate)s
ORDER BY %(order_by)s %(order)s
<|code_end|>
, generate the next line using the imports in this file:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context (functions, classes, or occasionally code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
. Output only the next line. | %(paging)s |
Based on the snippet: <|code_start|>
class Tasks(Api):
SORTABLE_COLUMNS = ['id', 'key_name', 'created', 'started', 'finished', 'state', 'error_msg']
validate = V.Schema({
V.Required('job_id'): basestring,
'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
SELECT COUNT(*) AS count
FROM jobs
<|code_end|>
, predict the immediate next line with the help of imports:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
. Output only the next line. | WHERE %(job_id_predicate)s |
Predict the next line for this snippet: <|code_start|>
class Tasks(Api):
SORTABLE_COLUMNS = ['id', 'key_name', 'created', 'started', 'finished', 'state', 'error_msg']
validate = V.Schema({
V.Required('job_id'): basestring,
'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
SELECT COUNT(*) AS count
FROM jobs
WHERE %(job_id_predicate)s
''' % generated_sql, **query_params).count
if job_count == 0:
raise exceptions.ApiException('No job found with id `%s`' % params['job_id'])
elif job_count > 1:
raise exceptions.ApiException('More than one job matches id `%s`, try using a more specific prefix' % params['job_id'])
rows = self._db_query('''
SELECT
<|code_end|>
with the help of current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
, which may contain function names, class names, or code. Output only the next line. | tasks.*, |
Continue the code snippet: <|code_start|> V.Required('job_id'): basestring,
'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
SELECT COUNT(*) AS count
FROM jobs
WHERE %(job_id_predicate)s
''' % generated_sql, **query_params).count
if job_count == 0:
raise exceptions.ApiException('No job found with id `%s`' % params['job_id'])
elif job_count > 1:
raise exceptions.ApiException('More than one job matches id `%s`, try using a more specific prefix' % params['job_id'])
rows = self._db_query('''
SELECT
tasks.*,
%(state_projection)s AS state
FROM tasks INNER JOIN jobs ON jobs.id = tasks.job_id
WHERE
%(job_id_predicate)s
%(state_predicate)s
<|code_end|>
. Use current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context (classes, functions, or code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
. Output only the next line. | ORDER BY %(order_by)s %(order)s |
Predict the next line for this snippet: <|code_start|>
class Tasks(Api):
SORTABLE_COLUMNS = ['id', 'key_name', 'created', 'started', 'finished', 'state', 'error_msg']
validate = V.Schema({
V.Required('job_id'): basestring,
'state': listor(validate_enum(shared.TaskState)),
V.Required('order', default=shared.SortDirection.ASC): validate_enum(shared.SortDirection),
V.Required('order_by', default='id'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 100000),
V.Required('page', default=1): V.All(int, V.Range(min=1, max=sys.maxint))
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
job_count = self._db_get('''
SELECT COUNT(*) AS count
FROM jobs
WHERE %(job_id_predicate)s
''' % generated_sql, **query_params).count
if job_count == 0:
raise exceptions.ApiException('No job found with id `%s`' % params['job_id'])
elif job_count > 1:
raise exceptions.ApiException('More than one job matches id `%s`, try using a more specific prefix' % params['job_id'])
rows = self._db_query('''
SELECT
<|code_end|>
with the help of current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api.validation import V, validate_enum, listor
from memsql_loader.api import shared, exceptions
import sys
and context from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
, which may contain function names, class names, or code. Output only the next line. | tasks.*, |
Next line prediction: <|code_start|>
class Command(object):
def __init__(self, options):
self.options = options
self.ensure_bootstrapped()
self.run()
@staticmethod
def configure(parser, subparsers):
raise NotImplemented('Every command needs a static configure(...) method')
def ensure_bootstrapped(self):
if not bootstrap.check_bootstrapped():
bootstrap.bootstrap()
<|code_end|>
. Use current file imports:
(from memsql_loader.util import bootstrap)
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/util/bootstrap.py
# def bootstrap(force=False):
# logger = log.get_logger('Bootstrap') # noqa
#
# def write_log(title, name, msg):
# log_title_width = 28
# title = ("%s [%s]: " % (title, name)).rjust(log_title_width, ' ')
# logger.info(title + msg)
#
# write_log('Database', storage.MEMSQL_LOADER_DB, 'Checking...')
# if force:
# write_log('Database', storage.MEMSQL_LOADER_DB, 'Dropping...')
# storage.LoaderStorage.drop_database()
# write_log('Database', storage.MEMSQL_LOADER_DB, 'Ready.')
#
# for Model in MODELS.values():
# instance = Model()
# if not instance.ready():
# write_log('Table', Model.__name__, 'Bootstrapping...')
# instance.setup()
# write_log('Table', Model.__name__, 'Ready.')
. Output only the next line. | def run(): |
Given the code snippet: <|code_start|>
class TableDefinition(object):
def __init__(self, table_name, sql, index_columns=None):
self.table_name = table_name
<|code_end|>
, generate the next line using the imports in this file:
from memsql_loader.util import apsw_helpers
and context (functions, classes, or occasionally code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
. Output only the next line. | self.sql = sql |
Given the following code snippet before the placeholder: <|code_start|>
class DownloadMetrics(object):
def __init__(self, total_size):
self._total_size = total_size
self._current_size = 0
self._last_snapshot = 0
self._last_change = time.time()
self._snapshots = []
self._avg_len = 30
def accumulate_bytes(self, current):
self._current_size = current
self.snapshot()
@property
def last_change(self):
return self._last_change
@throttle(1, instance_method=True)
def ping(self):
self._last_change = time.time()
@throttle(1, instance_method=True)
def snapshot(self):
# Local copy made here to prevent race conditions
current = self._current_size
diff = current - self._last_snapshot
# update our last change time so long as our speed is above 10 bytes per
# second or we have only been downloading for 30 seconds
<|code_end|>
, predict the next line using imports from the current file:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
. Output only the next line. | if diff > 10: |
Predict the next line for this snippet: <|code_start|>
DOWNLOAD_TIMEOUT = 30
SCRIPT_EXIT_TIMEOUT = 30
class DownloadMetrics(object):
def __init__(self, total_size):
self._total_size = total_size
self._current_size = 0
self._last_snapshot = 0
self._last_change = time.time()
self._snapshots = []
self._avg_len = 30
def accumulate_bytes(self, current):
self._current_size = current
self.snapshot()
@property
def last_change(self):
return self._last_change
@throttle(1, instance_method=True)
def ping(self):
self._last_change = time.time()
@throttle(1, instance_method=True)
def snapshot(self):
# Local copy made here to prevent race conditions
<|code_end|>
with the help of current file imports:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and context from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
, which may contain function names, class names, or code. Output only the next line. | current = self._current_size |
Based on the snippet: <|code_start|> # check that script hasn't errored before downloading
# NOTE: we wait here so that we can check if a script exits prematurely
# if this is the case, we fail the job without requeueing
time.sleep(1)
if self.script_proc.poll() is not None:
self.logger.error('Script `%s` exited prematurely with return code %d' % (self.job.spec.options.script, self.script_proc.returncode))
raise WorkerException('Script `%s` exited prematurely with return code %d' % (self.job.spec.options.script, self.script_proc.returncode))
# If we're piping data into a script and this file is
# a gzipped file, we'll decompress the data ourselves
# before piping it into the script.
if self.task.data['key_name'].endswith('.gz'):
# Set the window bits during decompression to
# zlib.MAX_WBITS | 32 tells the zlib library to
# automatically detect gzip headers.
self.decompress_obj = zlib.decompressobj(zlib.MAX_WBITS | 32)
curl.setopt(pycurl.WRITEFUNCTION, self._write_to_fifo(self.script_proc.stdin))
else:
curl.setopt(pycurl.WRITEFUNCTION, self._write_to_fifo(target_file))
if self.task.data['scheme'] == 'hdfs':
curl.setopt(pycurl.FOLLOWLOCATION, True)
self.logger.info('Starting download')
with self.task.protect():
self.task.start_step('download')
try:
curl.perform()
<|code_end|>
, predict the immediate next line with the help of imports:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
. Output only the next line. | status_code = curl.getinfo(pycurl.HTTP_CODE) |
Continue the code snippet: <|code_start|> # emit a KILL QUERY due to fifo.open()
# if we are piping through a script, the fifo should block
# because the downloader is polling the script's stdin instead
# of the fifo
blocking = self.job.spec.options.script is not None
with self.fifo.open(blocking=blocking) as target_file:
# allocate an URL for the target file
if self.task.data['scheme'] == 's3':
if self.is_anonymous:
key_url = 'http://%(bucket)s.s3.amazonaws.com/%(path)s' % {
'bucket': self.key.bucket.name,
'path': self.key.name.encode('utf-8')
}
else:
key_url = self.key.generate_url(expires_in=3600)
elif self.task.data['scheme'] == 'hdfs':
host = self.job.spec.source.hdfs_host
port = self.job.spec.source.webhdfs_port
hdfs_user = self.job.spec.source.hdfs_user
key_name = self.key.name
key_url = webhdfs.get_webhdfs_url(
host, port, hdfs_user, 'OPEN', key_name)
elif self.task.data['scheme'] == 'file':
key_url = 'file://%(path)s' % {'path': self.key.name}
else:
assert False, 'Unsupported job with paths: %s' % [ str(p) for p in self.job.paths ]
self._curl = curl = pycurl.Curl()
curl.setopt(pycurl.URL, key_url)
<|code_end|>
. Use current file imports:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and context (classes, functions, or code) from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
. Output only the next line. | curl.setopt(pycurl.NOPROGRESS, 0) |
Predict the next line after this snippet: <|code_start|> # HTTP client errors will cause task failure (no retry)
if status_code >= 400 and status_code < 500:
raise WorkerException('HTTP status code %s for file %s' % (status_code, self.key.name))
# HTTP server errors will cause task retry
elif status_code >= 500:
self.logger.warn('Received HTTP status code %s for file %s, requeueing' % (status_code, self.key.name))
raise RequeueTask()
# If we're piping data through a script, catch timeouts and return codes
if self.script_proc is not None:
self.script_proc.stdin.close()
for i in range(SCRIPT_EXIT_TIMEOUT):
if self.script_proc.poll() is not None:
break
time.sleep(1)
else:
self.logger.error('Script `%s` failed to exit...killing' % self.job.spec.options.script)
self.script_proc.kill()
raise WorkerException('Script `%s` failed to exit after %d seconds' % (self.job.spec.options.script, SCRIPT_EXIT_TIMEOUT))
if self.script_proc.returncode != 0:
self.logger.error('Script `%s` exited with return code %d' % (self.job.spec.options.script, self.script_proc.returncode))
raise WorkerException('Script `%s` exited with return code %d' % (self.job.spec.options.script, self.script_proc.returncode))
finally:
with self.task.protect():
self.task.stop_step('download')
if self.script_proc is not None and self.script_proc.returncode is None:
try:
<|code_end|>
using the current file's imports:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and any relevant context from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
. Output only the next line. | self.script_proc.kill() |
Given the code snippet: <|code_start|> if diff > 10:
self.ping()
self._snapshots.append(diff)
if len(self._snapshots) > self._avg_len:
self._snapshots = self._snapshots[(-1 * self._avg_len):]
self._last_snapshot = current
def get_stats(self):
if self._current_size == self._total_size or len(self._snapshots) == 0:
rate = 0
else:
# Not super efficient, but shouldn't really matter with max 30 values
rate = sum(self._snapshots) / len(self._snapshots)
time_left = -1 if rate == 0 else (self._total_size - self._current_size) / rate
return {
'bytes_downloaded': self._current_size,
'download_rate': rate,
'time_left': time_left
}
class Downloader(threading.Thread):
def __init__(self):
super(Downloader, self).__init__()
self.logger = log.get_logger('downloader')
self._error = None
self._tb = None
<|code_end|>
, generate the next line using the imports in this file:
import pycurl
import subprocess
import select
import sys
import threading
import time
import os
import zlib
import pywebhdfs.errors
from boto.exception import S3ResponseError
from boto.s3.connection import S3Connection
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.util import log, webhdfs
from wraptor.decorators import throttle
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.vendor import glob2
from pywebhdfs.webhdfs import PyWebHdfsClient
and context (functions, classes, or occasionally code) from other files:
# Path: memsql_loader/execution/errors.py
# class WorkerException(Exception):
# def __init__(self, *args, **kwargs):
# super(Exception, self).__init__(*args, **kwargs)
# self.time = time.time()
#
# class ConnectionException(WorkerException):
# pass
#
# class RequeueTask(Exception):
# pass
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/webhdfs.py
# def get_webhdfs_url(hdfs_host, webhdfs_port, hdfs_user, op, path):
#
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
. Output only the next line. | self._should_exit = False |
Predict the next line after this snippet: <|code_start|>
if log_path is not None:
# mark that we are joining the file log
try:
with open(log_path, 'a') as logfile:
logfile.write('Log file opened by %s\n' % os.getpid())
_file_handler = logging.FileHandler(filename=log_path)
format_str = "%(asctime)s %(levelname)s | %(process)d:%(name)s | %(message)s"
formatter = logging.Formatter(format_str)
_file_handler.setFormatter(formatter)
except IOError:
# we can't write a log file here
pass
def update_verbosity(debug=False, extra_verbose=False):
stdout_level = logging.DEBUG if (debug or extra_verbose) else logging.INFO
if _stream_handler:
_stream_handler.setLevel(stdout_level)
if extra_verbose:
logging.setLoggerClass(Logger)
class _SetDebug(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
global _debug
_debug = True
class _SetStdout(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
global _disable_stdout
<|code_end|>
using the current file's imports:
import os
import logging
import argparse
from memsql_loader.util import paths
and any relevant context from other files:
# Path: memsql_loader/util/paths.py
# MEMSQL_LOADER_PATH_ENV = "MEMSQL_LOADER_DATA_DIRECTORY"
# def get_data_dir():
. Output only the next line. | _disable_stdout = True |
Given the following code snippet before the placeholder: <|code_start|>
def validate_file_id_column(conn, database, table, col_name):
# Load id column isn't required
if col_name is None:
return True
col = conn.get("SELECT COLUMN_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=%s AND TABLE_NAME=%s AND COLUMN_NAME=%s",
database, table, col_name)
if col is None:
return False
col_type = col.COLUMN_TYPE.lower()
return 'bigint' in col_type and 'unsigned' in col_type
def validate_database_table(conn, database, table):
db_row = conn.get("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME=%s", database)
tb_row = conn.get("SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA=%s AND TABLE_NAME=%s", database, table)
return db_row is not None, tb_row is not None
def try_kill_query(conn, query_id):
try:
conn.execute("KILL QUERY %d" % query_id)
except pool.MySQLError as (errno, _):
if errno != errorcodes.ER_NO_SUCH_THREAD:
raise
def try_kill_connection(conn, conn_id):
try:
conn.execute("KILL CONNECTION %d" % conn_id)
except pool.MySQLError as (errno, _):
<|code_end|>
, predict the next line using imports from the current file:
from memsql.common import errorcodes
from memsql_loader.db import pool
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/db/pool.py
# _POOL = connection_pool.ConnectionPool()
# _POOL = connection_pool.ConnectionPool()
# def recreate_pool():
# def close_connections():
# def get_connection(host, port, database, user, password, pooled=True, **kwargs):
. Output only the next line. | if errno != errorcodes.ER_NO_SUCH_THREAD: |
Predict the next line for this snippet: <|code_start|>
class Api(object):
name = None
def __init__(self):
self.logger = log.get_logger(self.name or 'api')
self.storage = LoaderStorage()
def query(self, params):
assert 'validate' in dir(self), '`validate` must be defined'
return self._execute(self.validate(params))
<|code_end|>
with the help of current file imports:
from memsql_loader.util import apsw_helpers, log
from memsql_loader.loader_db.storage import LoaderStorage
and context from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/storage.py
# class LoaderStorage(APSWStorage):
# _instance = None
# _initialized = False
# _instance_lock = multiprocessing.RLock()
#
# # We use LoaderStorage as a singleton.
# def __new__(cls, *args, **kwargs):
# with cls._instance_lock:
# if cls._instance is None:
# cls._instance = super(LoaderStorage, cls).__new__(
# cls, *args, **kwargs)
# cls._initialized = False
# return cls._instance
#
# @classmethod
# def drop_database(cls):
# with cls._instance_lock:
# if os.path.isfile(get_loader_db_path()):
# os.remove(get_loader_db_path())
# if os.path.isfile(get_loader_db_path() + '-shm'):
# os.remove(get_loader_db_path() + '-shm')
# if os.path.isfile(get_loader_db_path() + '-wal'):
# os.remove(get_loader_db_path() + '-wal')
# cls._instance = None
#
# @classmethod
# @contextlib.contextmanager
# def fork_wrapper(cls):
# # This context manager should be used around any code that forks new
# # processes that will use a LoaderStorage object (e.g. Worker objects).
# # This ensures that we don't share SQLite connections across forked
# # processes.
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.close_connections()
# # We garbage collect here to clean up any SQLite objects we
# # may have missed; this is important because any surviving
# # objects post-fork will mess up SQLite connections in the
# # child process. We use generation=2 to collect as many
# # objects as possible.
# gc.collect(2)
# yield
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.setup_connections()
#
# def __init__(self):
# with LoaderStorage._instance_lock:
# # Since this is a singleton object, we don't want to call the
# # parent object's __init__ if we've already instantiated this
# # object in __new__. However, we may have closed this object's
# # connections in fork_wrapper above; in that case, we want to set
# # up new database connections.
# if not LoaderStorage._initialized:
# super(LoaderStorage, self).__init__(get_loader_db_path())
# LoaderStorage._initialized = True
# return
# elif not self._db or not self._db_t:
# self.setup_connections()
, which may contain function names, class names, or code. Output only the next line. | def _execute(self, params): |
Based on the snippet: <|code_start|>
class Api(object):
name = None
def __init__(self):
self.logger = log.get_logger(self.name or 'api')
self.storage = LoaderStorage()
def query(self, params):
assert 'validate' in dir(self), '`validate` must be defined'
return self._execute(self.validate(params))
def _execute(self, params):
raise NotImplemented()
def __db_caller(self, callback):
with self.storage.transaction() as cursor:
<|code_end|>
, predict the immediate next line with the help of imports:
from memsql_loader.util import apsw_helpers, log
from memsql_loader.loader_db.storage import LoaderStorage
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/storage.py
# class LoaderStorage(APSWStorage):
# _instance = None
# _initialized = False
# _instance_lock = multiprocessing.RLock()
#
# # We use LoaderStorage as a singleton.
# def __new__(cls, *args, **kwargs):
# with cls._instance_lock:
# if cls._instance is None:
# cls._instance = super(LoaderStorage, cls).__new__(
# cls, *args, **kwargs)
# cls._initialized = False
# return cls._instance
#
# @classmethod
# def drop_database(cls):
# with cls._instance_lock:
# if os.path.isfile(get_loader_db_path()):
# os.remove(get_loader_db_path())
# if os.path.isfile(get_loader_db_path() + '-shm'):
# os.remove(get_loader_db_path() + '-shm')
# if os.path.isfile(get_loader_db_path() + '-wal'):
# os.remove(get_loader_db_path() + '-wal')
# cls._instance = None
#
# @classmethod
# @contextlib.contextmanager
# def fork_wrapper(cls):
# # This context manager should be used around any code that forks new
# # processes that will use a LoaderStorage object (e.g. Worker objects).
# # This ensures that we don't share SQLite connections across forked
# # processes.
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.close_connections()
# # We garbage collect here to clean up any SQLite objects we
# # may have missed; this is important because any surviving
# # objects post-fork will mess up SQLite connections in the
# # child process. We use generation=2 to collect as many
# # objects as possible.
# gc.collect(2)
# yield
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.setup_connections()
#
# def __init__(self):
# with LoaderStorage._instance_lock:
# # Since this is a singleton object, we don't want to call the
# # parent object's __init__ if we've already instantiated this
# # object in __new__. However, we may have closed this object's
# # connections in fork_wrapper above; in that case, we want to set
# # up new database connections.
# if not LoaderStorage._initialized:
# super(LoaderStorage, self).__init__(get_loader_db_path())
# LoaderStorage._initialized = True
# return
# elif not self._db or not self._db_t:
# self.setup_connections()
. Output only the next line. | return callback(cursor) |
Based on the snippet: <|code_start|>
class Api(object):
name = None
def __init__(self):
self.logger = log.get_logger(self.name or 'api')
self.storage = LoaderStorage()
def query(self, params):
assert 'validate' in dir(self), '`validate` must be defined'
return self._execute(self.validate(params))
def _execute(self, params):
raise NotImplemented()
def __db_caller(self, callback):
with self.storage.transaction() as cursor:
return callback(cursor)
def _db_query(self, *args, **kwargs):
return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
<|code_end|>
, predict the immediate next line with the help of imports:
from memsql_loader.util import apsw_helpers, log
from memsql_loader.loader_db.storage import LoaderStorage
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/util/apsw_helpers.py
# class _NoDefault(object):
# class _RowBase(object):
# class Row(_RowBase):
# class SelectResult(list):
# def __init__(self, fields, values):
# def get(self, name, default=_NoDefault):
# def set(self, name, value):
# def __getattr__(self, name):
# def __setattr__(self, name, value):
# def __getitem__(self, name):
# def __setitem__(self, name, value):
# def __contains__(self, name):
# def __iter__(self):
# def __len__(self):
# def keys(self):
# def values(self):
# def items(self):
# def __eq__(self, other):
# def __ne__(self, other):
# def as_dict(self):
# def for_json(self):
# def nope(self, *args, **kwargs):
# def __init__(self, fields, rows, is_rows=False, RowClass=Row):
# def width(self):
# def __getitem__(self, i):
# def query(cursor, query, *params, **kwparams):
# def get(cursor, query, *params, **kwparams):
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/storage.py
# class LoaderStorage(APSWStorage):
# _instance = None
# _initialized = False
# _instance_lock = multiprocessing.RLock()
#
# # We use LoaderStorage as a singleton.
# def __new__(cls, *args, **kwargs):
# with cls._instance_lock:
# if cls._instance is None:
# cls._instance = super(LoaderStorage, cls).__new__(
# cls, *args, **kwargs)
# cls._initialized = False
# return cls._instance
#
# @classmethod
# def drop_database(cls):
# with cls._instance_lock:
# if os.path.isfile(get_loader_db_path()):
# os.remove(get_loader_db_path())
# if os.path.isfile(get_loader_db_path() + '-shm'):
# os.remove(get_loader_db_path() + '-shm')
# if os.path.isfile(get_loader_db_path() + '-wal'):
# os.remove(get_loader_db_path() + '-wal')
# cls._instance = None
#
# @classmethod
# @contextlib.contextmanager
# def fork_wrapper(cls):
# # This context manager should be used around any code that forks new
# # processes that will use a LoaderStorage object (e.g. Worker objects).
# # This ensures that we don't share SQLite connections across forked
# # processes.
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.close_connections()
# # We garbage collect here to clean up any SQLite objects we
# # may have missed; this is important because any surviving
# # objects post-fork will mess up SQLite connections in the
# # child process. We use generation=2 to collect as many
# # objects as possible.
# gc.collect(2)
# yield
# with cls._instance_lock:
# if cls._instance is not None:
# cls._instance.setup_connections()
#
# def __init__(self):
# with LoaderStorage._instance_lock:
# # Since this is a singleton object, we don't want to call the
# # parent object's __init__ if we've already instantiated this
# # object in __new__. However, we may have closed this object's
# # connections in fork_wrapper above; in that case, we want to set
# # up new database connections.
# if not LoaderStorage._initialized:
# super(LoaderStorage, self).__init__(get_loader_db_path())
# LoaderStorage._initialized = True
# return
# elif not self._db or not self._db_t:
# self.setup_connections()
. Output only the next line. | def _db_custom_query(self, callback): |
Predict the next line after this snippet: <|code_start|>
class Log(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('log', help='Tail the MemSQL Loader log file or print out it\'s path.')
subparser.set_defaults(command=Log)
<|code_end|>
using the current file's imports:
import sys
import os
from memsql_loader.util.command import Command
from memsql_loader.util import log
and any relevant context from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | subparser.add_argument('-p', '--path', default=False, action='store_true', |
Continue the code snippet: <|code_start|>
class Log(Command):
@staticmethod
def configure(parser, subparsers):
<|code_end|>
. Use current file imports:
import sys
import os
from memsql_loader.util.command import Command
from memsql_loader.util import log
and context (classes, functions, or code) from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | subparser = subparsers.add_parser('log', help='Tail the MemSQL Loader log file or print out it\'s path.') |
Next line prediction: <|code_start|>
class Task(Api):
validate = V.Schema({
V.Required('task_id'): int
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
task_row = self._db_get('''
<|code_end|>
. Use current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api import exceptions
from memsql_loader.api.validation import V
from memsql_loader.api import shared
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
. Output only the next line. | SELECT *, %(state_projection)s AS state |
Using the snippet: <|code_start|>
class Task(Api):
validate = V.Schema({
V.Required('task_id'): int
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
task_row = self._db_get('''
SELECT *, %(state_projection)s AS state
FROM tasks
WHERE
<|code_end|>
, determine the next line of code. You have imports:
from memsql_loader.api.base import Api
from memsql_loader.api import exceptions
from memsql_loader.api.validation import V
from memsql_loader.api import shared
and context (class names, function names, or code) available:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
. Output only the next line. | %(task_id_predicate)s |
Based on the snippet: <|code_start|>
class Task(Api):
validate = V.Schema({
V.Required('task_id'): int
})
<|code_end|>
, predict the immediate next line with the help of imports:
from memsql_loader.api.base import Api
from memsql_loader.api import exceptions
from memsql_loader.api.validation import V
from memsql_loader.api import shared
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
. Output only the next line. | def _execute(self, params): |
Predict the next line for this snippet: <|code_start|>
class Task(Api):
validate = V.Schema({
V.Required('task_id'): int
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
task_row = self._db_get('''
SELECT *, %(state_projection)s AS state
FROM tasks
WHERE
%(task_id_predicate)s
<|code_end|>
with the help of current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api import exceptions
from memsql_loader.api.validation import V
from memsql_loader.api import shared
and context from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
, which may contain function names, class names, or code. Output only the next line. | LIMIT 1 |
Given the following code snippet before the placeholder: <|code_start|>
class Job(Api):
name = 'Job'
validate = V.Schema({
V.Required('job_id'): basestring
})
def _execute(self, params):
<|code_end|>
, predict the next line using imports from the current file:
from memsql_loader.api.base import Api
from memsql_loader.api import shared, exceptions
from memsql_loader.api.validation import V
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | generated_sql, query_params = self._generate_sql(params) |
Given the code snippet: <|code_start|>
class Job(Api):
name = 'Job'
validate = V.Schema({
V.Required('job_id'): basestring
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
row = self._db_query("""
SELECT
id,
created,
spec,
IFNULL(tasks_total, 0) AS tasks_total,
IFNULL(tasks_queued, 0) AS tasks_queued,
-- Tasks that are cancelled count as finished also.
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
IFNULL(tasks_cancelled, 0) AS tasks_cancelled,
IFNULL(tasks_succeeded, 0) AS tasks_succeeded,
IFNULL(tasks_errored, 0) AS tasks_errored,
%(state_projection)s AS state
FROM
jobs
LEFT JOIN(
<|code_end|>
, generate the next line using the imports in this file:
from memsql_loader.api.base import Api
from memsql_loader.api import shared, exceptions
from memsql_loader.api.validation import V
and context (functions, classes, or occasionally code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | SELECT |
Given snippet: <|code_start|> validate = V.Schema({
V.Required('job_id'): basestring
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
row = self._db_query("""
SELECT
id,
created,
spec,
IFNULL(tasks_total, 0) AS tasks_total,
IFNULL(tasks_queued, 0) AS tasks_queued,
-- Tasks that are cancelled count as finished also.
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
IFNULL(tasks_cancelled, 0) AS tasks_cancelled,
IFNULL(tasks_succeeded, 0) AS tasks_succeeded,
IFNULL(tasks_errored, 0) AS tasks_errored,
%(state_projection)s AS state
FROM
jobs
LEFT JOIN(
SELECT
tasks.job_id,
-- counts
COUNT(tasks.id) AS tasks_total,
CAST(SUM(%(success_cond)s) AS SIGNED) AS tasks_succeeded,
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from memsql_loader.api.base import Api
from memsql_loader.api import shared, exceptions
from memsql_loader.api.validation import V
and context:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
which might include code, classes, or functions. Output only the next line. | CAST(SUM(%(error_cond)s) AS SIGNED) AS tasks_errored, |
Using the snippet: <|code_start|>
class Job(Api):
name = 'Job'
validate = V.Schema({
V.Required('job_id'): basestring
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
row = self._db_query("""
SELECT
id,
created,
spec,
<|code_end|>
, determine the next line of code. You have imports:
from memsql_loader.api.base import Api
from memsql_loader.api import shared, exceptions
from memsql_loader.api.validation import V
and context (class names, function names, or code) available:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | IFNULL(tasks_total, 0) AS tasks_total, |
Given snippet: <|code_start|>
class CancelTask(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('cancel-task', help='Cancel a specific task')
subparser.set_defaults(command=CancelTask)
subparser.add_argument('task_id', type=int,
help='The ID of the task to cancel')
def run(self):
self.logger = log.get_logger('CancelTask')
self.tasks = Tasks()
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from memsql_loader.util.command import Command
from memsql_loader.util import log
from memsql_loader.loader_db.tasks import Tasks
and context:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/tasks.py
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self):
# storage = LoaderStorage()
# super(Tasks, self).__init__('tasks', storage, execution_ttl=api.shared.TASKS_TTL, task_handler_class=TaskHandler)
#
# # NOTE: This method overrides bulk_finish on APSWSQLStepQueue so that it
# # finishes tasks even if they are currently running.
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# with self.storage.transaction() as cursor:
# now = unix_timestamp(datetime.utcnow())
# affected_rows = apsw_helpers.query(cursor, '''
# SELECT * from %s
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# **extra_predicate_args)
# apsw_helpers.query(cursor, '''
# UPDATE %s
# SET
# execution_id = 0,
# last_contact = datetime(:now, 'unixepoch'),
# update_count = update_count + 1,
# steps = '[]',
# started = datetime(:now, 'unixepoch'),
# finished = datetime(:now, 'unixepoch'),
# result = :result
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# now=now,
# result=result,
# **extra_predicate_args)
#
# return len(affected_rows)
#
# def get_tasks_in_state(self, state, extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# query_params = api.shared.TaskState.projection_params()
# if len(state) == 1:
# state_list = "('" + str(state[0]) + "')"
# else:
# state_list = str(tuple(str(v) for v in state ))
# query_params.update(extra_predicate_args)
# with self.storage.cursor() as cursor:
# rows = apsw_helpers.query(cursor, '''
# SELECT *
# FROM %s
# WHERE
# %s IN %s
# %s
# ORDER BY id ASC
# ''' % (self.table_name, api.shared.TaskState.PROJECTION, state_list, extra_predicate_sql),
# **query_params)
#
# return [ api.shared.task_load_row(row) for row in rows ]
which might include code, classes, or functions. Output only the next line. | rows_affected = self.tasks.bulk_finish(extra_predicate=('id = :task_id', { 'task_id': self.options.task_id })) |
Here is a snippet: <|code_start|>
class CancelTask(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('cancel-task', help='Cancel a specific task')
subparser.set_defaults(command=CancelTask)
subparser.add_argument('task_id', type=int,
help='The ID of the task to cancel')
def run(self):
self.logger = log.get_logger('CancelTask')
self.tasks = Tasks()
rows_affected = self.tasks.bulk_finish(extra_predicate=('id = :task_id', { 'task_id': self.options.task_id }))
<|code_end|>
. Write the next line using the current file imports:
from memsql_loader.util.command import Command
from memsql_loader.util import log
from memsql_loader.loader_db.tasks import Tasks
and context from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/tasks.py
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self):
# storage = LoaderStorage()
# super(Tasks, self).__init__('tasks', storage, execution_ttl=api.shared.TASKS_TTL, task_handler_class=TaskHandler)
#
# # NOTE: This method overrides bulk_finish on APSWSQLStepQueue so that it
# # finishes tasks even if they are currently running.
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# with self.storage.transaction() as cursor:
# now = unix_timestamp(datetime.utcnow())
# affected_rows = apsw_helpers.query(cursor, '''
# SELECT * from %s
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# **extra_predicate_args)
# apsw_helpers.query(cursor, '''
# UPDATE %s
# SET
# execution_id = 0,
# last_contact = datetime(:now, 'unixepoch'),
# update_count = update_count + 1,
# steps = '[]',
# started = datetime(:now, 'unixepoch'),
# finished = datetime(:now, 'unixepoch'),
# result = :result
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# now=now,
# result=result,
# **extra_predicate_args)
#
# return len(affected_rows)
#
# def get_tasks_in_state(self, state, extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# query_params = api.shared.TaskState.projection_params()
# if len(state) == 1:
# state_list = "('" + str(state[0]) + "')"
# else:
# state_list = str(tuple(str(v) for v in state ))
# query_params.update(extra_predicate_args)
# with self.storage.cursor() as cursor:
# rows = apsw_helpers.query(cursor, '''
# SELECT *
# FROM %s
# WHERE
# %s IN %s
# %s
# ORDER BY id ASC
# ''' % (self.table_name, api.shared.TaskState.PROJECTION, state_list, extra_predicate_sql),
# **query_params)
#
# return [ api.shared.task_load_row(row) for row in rows ]
, which may include functions, classes, or code. Output only the next line. | plural = not rows_affected == 1 |
Using the snippet: <|code_start|>
class CancelTask(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('cancel-task', help='Cancel a specific task')
subparser.set_defaults(command=CancelTask)
subparser.add_argument('task_id', type=int,
help='The ID of the task to cancel')
def run(self):
self.logger = log.get_logger('CancelTask')
self.tasks = Tasks()
rows_affected = self.tasks.bulk_finish(extra_predicate=('id = :task_id', { 'task_id': self.options.task_id }))
plural = not rows_affected == 1
<|code_end|>
, determine the next line of code. You have imports:
from memsql_loader.util.command import Command
from memsql_loader.util import log
from memsql_loader.loader_db.tasks import Tasks
and context (class names, function names, or code) available:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/loader_db/tasks.py
# class Tasks(apsw_sql_step_queue.APSWSQLStepQueue):
# def __init__(self):
# storage = LoaderStorage()
# super(Tasks, self).__init__('tasks', storage, execution_ttl=api.shared.TASKS_TTL, task_handler_class=TaskHandler)
#
# # NOTE: This method overrides bulk_finish on APSWSQLStepQueue so that it
# # finishes tasks even if they are currently running.
# def bulk_finish(self, result='cancelled', extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# with self.storage.transaction() as cursor:
# now = unix_timestamp(datetime.utcnow())
# affected_rows = apsw_helpers.query(cursor, '''
# SELECT * from %s
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# **extra_predicate_args)
# apsw_helpers.query(cursor, '''
# UPDATE %s
# SET
# execution_id = 0,
# last_contact = datetime(:now, 'unixepoch'),
# update_count = update_count + 1,
# steps = '[]',
# started = datetime(:now, 'unixepoch'),
# finished = datetime(:now, 'unixepoch'),
# result = :result
# WHERE
# finished IS NULL
# %s
# ''' % (self.table_name, extra_predicate_sql),
# now=now,
# result=result,
# **extra_predicate_args)
#
# return len(affected_rows)
#
# def get_tasks_in_state(self, state, extra_predicate=None):
# extra_predicate_sql, extra_predicate_args = (
# self._build_extra_predicate(extra_predicate))
#
# query_params = api.shared.TaskState.projection_params()
# if len(state) == 1:
# state_list = "('" + str(state[0]) + "')"
# else:
# state_list = str(tuple(str(v) for v in state ))
# query_params.update(extra_predicate_args)
# with self.storage.cursor() as cursor:
# rows = apsw_helpers.query(cursor, '''
# SELECT *
# FROM %s
# WHERE
# %s IN %s
# %s
# ORDER BY id ASC
# ''' % (self.table_name, api.shared.TaskState.PROJECTION, state_list, extra_predicate_sql),
# **query_params)
#
# return [ api.shared.task_load_row(row) for row in rows ]
. Output only the next line. | print 'Cancelled', rows_affected, 'task%s.' % ('s' if plural else '') |
Here is a snippet: <|code_start|> V.Required("file_id_column", default=None): V.Any(basestring, None),
V.Required("non_local_load", default=False): bool,
V.Required("duplicate_key_method", default="error"): V.Any("error", "replace", "ignore"),
V.Required("script", default=None): V.Any(basestring, None)
})
_db_schema = V.Schema({
V.Required('host', default='127.0.0.1'): basestring,
V.Required('port', default=3306): int,
V.Required('user', default='root'): basestring,
V.Required('password', default=''): basestring,
})
# Each path in paths looks something like:
# [s3://|file://|hdfs://][bucket/]file/pattern
SPEC_VALIDATOR = V.Schema({
V.Required("source"): V.Schema({
V.Required("aws_access_key", default=DEFAULT_AWS_ACCESS_KEY): V.Any(basestring, None),
V.Required("aws_secret_key", default=DEFAULT_AWS_SECRET_KEY): V.Any(basestring, None),
V.Required("hdfs_host", default=None): V.Any(basestring, None),
V.Required("webhdfs_port", default=50070): V.Any(int, None),
V.Required("hdfs_user", default=None): V.Any(basestring, None),
V.Required("paths"): [basestring],
}),
V.Required("connection", default=_db_schema({})): _db_schema,
V.Required("target"): V.Schema({
V.Required("database"): basestring,
V.Required("table"): basestring
}, required=True),
V.Required("options", default=_options_schema({})): _options_schema
<|code_end|>
. Write the next line using the current file imports:
import os
import shlex
import urlparse
import voluptuous as V
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.util import log
from memsql_loader.vendor import glob2
and context from other files:
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
, which may include functions, classes, or code. Output only the next line. | }) |
Predict the next line after this snippet: <|code_start|>
class InvalidKeyException(Exception):
pass
class LoadPath(object):
def __init__(self, path):
self.path = path
parsed = urlparse.urlparse(path)
self.bucket = None
if parsed.scheme == 's3':
self.scheme = 's3'
self.bucket = parsed.netloc
# this strips the starting /
self.pattern = parsed.path[1:]
elif parsed.scheme == 'hdfs':
self.scheme = 'hdfs'
self.pattern = parsed.netloc + parsed.path
self.pattern = self.pattern.lstrip('/')
elif parsed.scheme == 'file' or not parsed.scheme:
self.scheme = 'file'
# cannot use os.path.join because of the starting /
self.pattern = parsed.netloc + parsed.path
else:
raise V.Invalid("Unknown file scheme %s" % parsed.scheme, path=[ 'source', 'paths' ])
if self.scheme == 'file' and '|' in self.pattern:
<|code_end|>
using the current file's imports:
import os
import shlex
import urlparse
import voluptuous as V
from memsql_loader.util.attr_dict import AttrDict
from memsql_loader.util import log
from memsql_loader.vendor import glob2
and any relevant context from other files:
# Path: memsql_loader/util/attr_dict.py
# class AttrDict(dict):
# def __getattr__(self, key):
# try:
# return self.__getitem__(key)
# except KeyError:
# # This lets you use dict-type attributes that aren't keys
# return getattr(super(AttrDict, self), key)
#
# def __setattr__(self, key, value):
# return self.__setitem__(key, value)
#
# def __str__(self):
# return self.__repr__()
#
# def __repr__(self):
# return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self))
#
# @staticmethod
# def from_dict(source):
# def _transform(d):
# """ Turns a nested dict into nested AttrDict's """
# for k, v in d.iteritems():
# if isinstance(v, dict):
# d[k] = _transform(v)
# return AttrDict(d)
#
# return _transform(source)
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
. Output only the next line. | raise V.Invalid("OR (|) operators are not supported in file patterns", path=[ 'source', 'paths' ]) |
Continue the code snippet: <|code_start|>
class Task(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('task', help='Show information about a single task')
subparser.set_defaults(command=Task)
subparser.add_argument('task_id', type=int,
<|code_end|>
. Use current file imports:
import sys
from clark.super_enum import SuperEnum
from memsql_loader.util.command import Command
from memsql_loader.util import log, super_json as json
from memsql_loader.api import exceptions
from memsql_loader.api.task import Task as TaskApi
and context (classes, functions, or code) from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/task.py
# class Task(Api):
# validate = V.Schema({
# V.Required('task_id'): int
# })
#
# def _execute(self, params):
# generated_sql, query_params = self._generate_sql(params)
#
# task_row = self._db_get('''
# SELECT *, %(state_projection)s AS state
# FROM tasks
# WHERE
# %(task_id_predicate)s
# LIMIT 1
# ''' % generated_sql, **query_params)
#
# if not task_row:
# raise exceptions.ApiException('No task found with id `%s`' % params['task_id'])
#
# return shared.task_load_row(task_row)
#
# def _generate_sql(self, params):
# query_params = shared.TaskState.projection_params()
# return { k: v or '' for k, v in {
# 'task_id_predicate': self._task_id_predicate(params, query_params),
# 'state_projection': shared.TaskState.PROJECTION,
# }.iteritems() }, query_params
#
# def _task_id_predicate(self, params, query_params):
# query_params['task_id_predicate'] = params['task_id']
# return 'tasks.id = :task_id_predicate'
. Output only the next line. | help='The ID of the task to lookup') |
Here is a snippet: <|code_start|>
class Task(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('task', help='Show information about a single task')
subparser.set_defaults(command=Task)
subparser.add_argument('task_id', type=int,
help='The ID of the task to lookup')
def run(self):
self.logger = log.get_logger('Task')
self.task_api = TaskApi()
<|code_end|>
. Write the next line using the current file imports:
import sys
from clark.super_enum import SuperEnum
from memsql_loader.util.command import Command
from memsql_loader.util import log, super_json as json
from memsql_loader.api import exceptions
from memsql_loader.api.task import Task as TaskApi
and context from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/task.py
# class Task(Api):
# validate = V.Schema({
# V.Required('task_id'): int
# })
#
# def _execute(self, params):
# generated_sql, query_params = self._generate_sql(params)
#
# task_row = self._db_get('''
# SELECT *, %(state_projection)s AS state
# FROM tasks
# WHERE
# %(task_id_predicate)s
# LIMIT 1
# ''' % generated_sql, **query_params)
#
# if not task_row:
# raise exceptions.ApiException('No task found with id `%s`' % params['task_id'])
#
# return shared.task_load_row(task_row)
#
# def _generate_sql(self, params):
# query_params = shared.TaskState.projection_params()
# return { k: v or '' for k, v in {
# 'task_id_predicate': self._task_id_predicate(params, query_params),
# 'state_projection': shared.TaskState.PROJECTION,
# }.iteritems() }, query_params
#
# def _task_id_predicate(self, params, query_params):
# query_params['task_id_predicate'] = params['task_id']
# return 'tasks.id = :task_id_predicate'
, which may include functions, classes, or code. Output only the next line. | try: |
Given the following code snippet before the placeholder: <|code_start|>
class Task(Command):
@staticmethod
def configure(parser, subparsers):
subparser = subparsers.add_parser('task', help='Show information about a single task')
subparser.set_defaults(command=Task)
subparser.add_argument('task_id', type=int,
help='The ID of the task to lookup')
def run(self):
self.logger = log.get_logger('Task')
self.task_api = TaskApi()
try:
result = self.task_api.query({ 'task_id': self.options.task_id })
except exceptions.ApiException as e:
<|code_end|>
, predict the next line using imports from the current file:
import sys
from clark.super_enum import SuperEnum
from memsql_loader.util.command import Command
from memsql_loader.util import log, super_json as json
from memsql_loader.api import exceptions
from memsql_loader.api.task import Task as TaskApi
and context including class names, function names, and sometimes code from other files:
# Path: memsql_loader/util/command.py
# class Command(object):
# def __init__(self, options):
# self.options = options
# self.ensure_bootstrapped()
# self.run()
#
# @staticmethod
# def configure(parser, subparsers):
# raise NotImplemented('Every command needs a static configure(...) method')
#
# def ensure_bootstrapped(self):
# if not bootstrap.check_bootstrapped():
# bootstrap.bootstrap()
#
# def run():
# raise NotImplemented('Every command needs a run method, otherwise not much is going to happen')
#
# Path: memsql_loader/util/log.py
# def setup(log_path=None, stdout_enabled=True):
# def update_verbosity(debug=False, extra_verbose=False):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def __call__(self, parser, namespace, value, option_string=None):
# def configure(parser):
# def get_logger(name, level=None):
# def format(self, record):
# def __init__(self, name, level=logging.DEBUG):
# class _SetDebug(argparse.Action):
# class _SetStdout(argparse.Action):
# class _SetLogPath(argparse.Action):
# class Formatter(logging.Formatter):
# class Logger(logging.Logger):
#
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/api/exceptions.py
# class ApiException(Exception):
# class DBConnectionIssue(ApiException):
# class DBError(ApiException):
# def __str__(self):
# def __init__(self, *args):
# def __str__(self):
#
# Path: memsql_loader/api/task.py
# class Task(Api):
# validate = V.Schema({
# V.Required('task_id'): int
# })
#
# def _execute(self, params):
# generated_sql, query_params = self._generate_sql(params)
#
# task_row = self._db_get('''
# SELECT *, %(state_projection)s AS state
# FROM tasks
# WHERE
# %(task_id_predicate)s
# LIMIT 1
# ''' % generated_sql, **query_params)
#
# if not task_row:
# raise exceptions.ApiException('No task found with id `%s`' % params['task_id'])
#
# return shared.task_load_row(task_row)
#
# def _generate_sql(self, params):
# query_params = shared.TaskState.projection_params()
# return { k: v or '' for k, v in {
# 'task_id_predicate': self._task_id_predicate(params, query_params),
# 'state_projection': shared.TaskState.PROJECTION,
# }.iteritems() }, query_params
#
# def _task_id_predicate(self, params, query_params):
# query_params['task_id_predicate'] = params['task_id']
# return 'tasks.id = :task_id_predicate'
. Output only the next line. | print e.message |
Predict the next line after this snippet: <|code_start|>def delete_pid_file():
try:
os.remove(get_pid_file_path())
except Exception:
pass
def write_pid_file():
atexit.register(delete_pid_file)
with open(get_pid_file_path(), 'w') as f:
f.write("%s\n" % os.getpid())
def get_server_pid():
try:
with open(get_pid_file_path(), 'r') as f:
return int(f.read().strip())
except IOError as e:
if e.errno == errno.ENOENT:
return None
raise
def is_server_running():
pid = get_server_pid()
if pid is None:
return False
try:
# This call will succeed and do nothing if the process exists,
# and it will raise an exception if the process does not exist.
os.kill(pid, 0)
return True
<|code_end|>
using the current file's imports:
import atexit
import errno
import os
from memsql_loader.util import paths
and any relevant context from other files:
# Path: memsql_loader/util/paths.py
# MEMSQL_LOADER_PATH_ENV = "MEMSQL_LOADER_DATA_DIRECTORY"
# def get_data_dir():
. Output only the next line. | except OSError as e: |
Continue the code snippet: <|code_start|>
TASKS_TTL = 120
class TaskState(SuperEnum):
QUEUED = SuperEnum.E
RUNNING = SuperEnum.E
SUCCESS = SuperEnum.E
<|code_end|>
. Use current file imports:
import re
from datetime import datetime
from dateutil import parser
from clark.super_enum import SuperEnum
from memsql_loader.util import super_json as json
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and context (classes, functions, or code) from other files:
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | ERROR = SuperEnum.E |
Predict the next line after this snippet: <|code_start|>
TASKS_TTL = 120
class TaskState(SuperEnum):
QUEUED = SuperEnum.E
RUNNING = SuperEnum.E
SUCCESS = SuperEnum.E
ERROR = SuperEnum.E
CANCELLED = SuperEnum.E
SUCCESS_CONDITION = 'tasks.result = \'success\''
ERROR_CONDITION = 'tasks.result = \'error\''
CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# The cancelled condition is not necessary here since a cancelled
# task also counts as finished, and UPPER(tasks.result) will return
# what we want in that case.
<|code_end|>
using the current file's imports:
import re
from datetime import datetime
from dateutil import parser
from clark.super_enum import SuperEnum
from memsql_loader.util import super_json as json
from memsql_loader.util.apsw_sql_step_queue.time_helpers import unix_timestamp
and any relevant context from other files:
# Path: memsql_loader/util/super_json.py
# def simplejson_datetime_serializer(obj):
# def _set_defaults(kwargs):
# def dumps(data, **kwargs):
# def loads(data, **kwargs):
# def safe_loads(data, default, **kwargs):
# def pformat(d):
#
# Path: memsql_loader/util/apsw_sql_step_queue/time_helpers.py
# def unix_timestamp(dt):
# return int(time.mktime(dt.timetuple()))
. Output only the next line. | PROJECTION = re.sub(r'\s+', ' ', """ |
Here is a snippet: <|code_start|> SORTABLE_COLUMNS = [ 'id', 'created', 'last_contact', 'state', 'tasks_queued', 'tasks_running', 'tasks_cancelled', 'tasks_errored', 'tasks_finished', 'tasks_total' ]
name = 'Jobs'
validate = V.Schema({
'state': listor(validate_enum(shared.JobState)),
V.Required('order', default=shared.SortDirection.DESC): validate_enum(shared.SortDirection),
V.Required('order_by', default='created'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 10000),
V.Required('page', default=1): V.Range(min=1)
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
rows = self._db_query("""
SELECT
id,
created,
last_contact,
spec,
IFNULL(tasks_total, 0) AS tasks_total,
IFNULL(tasks_cancelled, 0) AS tasks_cancelled,
IFNULL(tasks_errored, 0) AS tasks_errored,
IFNULL(tasks_queued, 0) AS tasks_queued,
-- Tasks that are cancelled count as finished also
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
%(state_projection)s AS state,
<|code_end|>
. Write the next line using the current file imports:
import dateutil.parser
from memsql_loader.api.base import Api
from memsql_loader.api import shared
from memsql_loader.api.validation import V, validate_enum, listor
and context from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
, which may include functions, classes, or code. Output only the next line. | bytes_total, |
Using the snippet: <|code_start|> name = 'Jobs'
validate = V.Schema({
'state': listor(validate_enum(shared.JobState)),
V.Required('order', default=shared.SortDirection.DESC): validate_enum(shared.SortDirection),
V.Required('order_by', default='created'): V.Any(*SORTABLE_COLUMNS),
'page_size': V.Range(1, 10000),
V.Required('page', default=1): V.Range(min=1)
})
def _execute(self, params):
generated_sql, query_params = self._generate_sql(params)
rows = self._db_query("""
SELECT
id,
created,
last_contact,
spec,
IFNULL(tasks_total, 0) AS tasks_total,
IFNULL(tasks_cancelled, 0) AS tasks_cancelled,
IFNULL(tasks_errored, 0) AS tasks_errored,
IFNULL(tasks_queued, 0) AS tasks_queued,
-- Tasks that are cancelled count as finished also
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
%(state_projection)s AS state,
bytes_total,
bytes_downloaded,
<|code_end|>
, determine the next line of code. You have imports:
import dateutil.parser
from memsql_loader.api.base import Api
from memsql_loader.api import shared
from memsql_loader.api.validation import V, validate_enum, listor
and context (class names, function names, or code) available:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | download_rate, |
Based on the snippet: <|code_start|> created,
last_contact,
spec,
IFNULL(tasks_total, 0) AS tasks_total,
IFNULL(tasks_cancelled, 0) AS tasks_cancelled,
IFNULL(tasks_errored, 0) AS tasks_errored,
IFNULL(tasks_queued, 0) AS tasks_queued,
-- Tasks that are cancelled count as finished also
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
%(state_projection)s AS state,
bytes_total,
bytes_downloaded,
download_rate,
first_task_start
FROM
jobs
LEFT JOIN(
SELECT
tasks.job_id,
MIN(tasks.started) AS first_task_start,
MAX(tasks.last_contact) AS last_contact,
-- counts
-- These casts are necessary because MemSQL makes arbitrary choices
COUNT(tasks.id) AS tasks_total,
CAST(SUM(%(cancelled_cond)s) AS SIGNED) AS tasks_cancelled,
CAST(SUM(%(error_cond)s) AS SIGNED) AS tasks_errored,
<|code_end|>
, predict the immediate next line with the help of imports:
import dateutil.parser
from memsql_loader.api.base import Api
from memsql_loader.api import shared
from memsql_loader.api.validation import V, validate_enum, listor
and context (classes, functions, sometimes code) from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | CAST(SUM(%(finished_cond)s) AS SIGNED) AS tasks_finished, |
Next line prediction: <|code_start|> -- Tasks that are cancelled count as finished also
-- It is always true that if one is null, all are null
IFNULL(tasks_total - tasks_queued - tasks_finished, 0) AS tasks_running,
IFNULL(tasks_finished, 0) AS tasks_finished,
%(state_projection)s AS state,
bytes_total,
bytes_downloaded,
download_rate,
first_task_start
FROM
jobs
LEFT JOIN(
SELECT
tasks.job_id,
MIN(tasks.started) AS first_task_start,
MAX(tasks.last_contact) AS last_contact,
-- counts
-- These casts are necessary because MemSQL makes arbitrary choices
COUNT(tasks.id) AS tasks_total,
CAST(SUM(%(cancelled_cond)s) AS SIGNED) AS tasks_cancelled,
CAST(SUM(%(error_cond)s) AS SIGNED) AS tasks_errored,
CAST(SUM(%(finished_cond)s) AS SIGNED) AS tasks_finished,
CAST(SUM(%(queued_cond)s) AS SIGNED) AS tasks_queued,
-- download information
-- CAST because JSON number types are always floats
CAST(SUM(tasks.bytes_total) AS SIGNED) AS bytes_total,
CAST(SUM(tasks.bytes_downloaded) AS SIGNED) AS bytes_downloaded,
<|code_end|>
. Use current file imports:
(import dateutil.parser
from memsql_loader.api.base import Api
from memsql_loader.api import shared
from memsql_loader.api.validation import V, validate_enum, listor)
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | CAST(SUM(tasks.download_rate) AS SIGNED) AS download_rate |
Next line prediction: <|code_start|> %(state_projection)s AS state,
bytes_total,
bytes_downloaded,
download_rate,
first_task_start
FROM
jobs
LEFT JOIN(
SELECT
tasks.job_id,
MIN(tasks.started) AS first_task_start,
MAX(tasks.last_contact) AS last_contact,
-- counts
-- These casts are necessary because MemSQL makes arbitrary choices
COUNT(tasks.id) AS tasks_total,
CAST(SUM(%(cancelled_cond)s) AS SIGNED) AS tasks_cancelled,
CAST(SUM(%(error_cond)s) AS SIGNED) AS tasks_errored,
CAST(SUM(%(finished_cond)s) AS SIGNED) AS tasks_finished,
CAST(SUM(%(queued_cond)s) AS SIGNED) AS tasks_queued,
-- download information
-- CAST because JSON number types are always floats
CAST(SUM(tasks.bytes_total) AS SIGNED) AS bytes_total,
CAST(SUM(tasks.bytes_downloaded) AS SIGNED) AS bytes_downloaded,
CAST(SUM(tasks.download_rate) AS SIGNED) AS download_rate
FROM tasks
GROUP BY tasks.job_id
) AS job_tasks ON job_tasks.job_id = jobs.id
<|code_end|>
. Use current file imports:
(import dateutil.parser
from memsql_loader.api.base import Api
from memsql_loader.api import shared
from memsql_loader.api.validation import V, validate_enum, listor)
and context including class names, function names, or small code snippets from other files:
# Path: memsql_loader/api/base.py
# class Api(object):
# name = None
#
# def __init__(self):
# self.logger = log.get_logger(self.name or 'api')
# self.storage = LoaderStorage()
#
# def query(self, params):
# assert 'validate' in dir(self), '`validate` must be defined'
# return self._execute(self.validate(params))
#
# def _execute(self, params):
# raise NotImplemented()
#
# def __db_caller(self, callback):
# with self.storage.transaction() as cursor:
# return callback(cursor)
#
# def _db_query(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.query(c, *args, **kwargs))
#
# def _db_custom_query(self, callback):
# return self.__db_caller(callback)
#
# def _db_get(self, *args, **kwargs):
# return self.__db_caller(lambda c: apsw_helpers.get(c, *args, **kwargs))
#
# Path: memsql_loader/api/shared.py
# TASKS_TTL = 120
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# SUCCESS = SuperEnum.E
# ERROR = SuperEnum.E
# CANCELLED = SuperEnum.E
# SUCCESS_CONDITION = 'tasks.result = \'success\''
# ERROR_CONDITION = 'tasks.result = \'error\''
# CANCELLED_CONDITION = 'tasks.result = \'cancelled\''
# FINISHED_CONDITION = 'tasks.finished IS NOT NULL'
# QUEUED_CONDITION = 'tasks.finished IS NULL AND (tasks.execution_id IS NULL OR tasks.last_contact <= datetime(:now, "unixepoch", "-%s second"))' % TASKS_TTL
# PROJECTION = re.sub(r'\s+', ' ', """
# (CASE
# WHEN (%s) THEN UPPER(tasks.result)
# WHEN (%s) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# """ % (FINISHED_CONDITION, QUEUED_CONDITION)).strip()
# QUEUED = SuperEnum.E
# RUNNING = SuperEnum.E
# FINISHED = SuperEnum.E
# CANCELLED = SuperEnum.E
# PROJECTION = re.sub(r'\s+', ' ', '''
# (CASE
# WHEN (
# (job_tasks.tasks_total - job_tasks.tasks_finished) = 0
# AND job_tasks.tasks_cancelled > 0) THEN 'CANCELLED'
# WHEN (
# job_tasks.tasks_total IS NULL
# OR job_tasks.tasks_finished = job_tasks.tasks_total) THEN 'FINISHED'
# WHEN (job_tasks.tasks_queued = job_tasks.tasks_total) THEN 'QUEUED'
# ELSE 'RUNNING'
# END)
# ''').strip()
# DESC = SuperEnum.E
# ASC = SuperEnum.E
# class TaskState(SuperEnum):
# class JobState(SuperEnum):
# class SortDirection(SuperEnum):
# def projection_params():
# def task_load_row(row):
# def job_load_row(row):
#
# Path: memsql_loader/api/validation.py
# def listor(sub_validator):
# def _validate(value):
# def validate_enum(EnumType):
# def _validate(value):
. Output only the next line. | %(where_expr)s |
Predict the next line after this snippet: <|code_start|>
urlpatterns = patterns(
'',
url(r'^$', FeedListView.as_view(), name='feeds_list'),
url(r'^new/$', FeedCreateView.as_view(), name='feeds_create'),
url(r'^edit/(?P<pk>[\w]+)/$', FeedUpdateView.as_view(), name='feeds_update'),
url(r'^delete/(?P<pk>[\w]+)/$', FeedDeleteView.as_view(), name='feeds_delete'),
<|code_end|>
using the current file's imports:
from django.conf.urls import patterns, url
from .views import FeedListView, FeedCreateView, FeedUpdateView, FeedDeleteView
and any relevant context from other files:
# Path: apps/feeds/views.py
# class FeedListView(LoginRequiredMixin, ListView):
# model = Feed
#
# def get_queryset(self):
# return Feed.objects.filter(created_by=self.request.user)
#
# class FeedCreateView(LoginRequiredMixin, CreateView):
# model = Feed
# form_class = FeedCreateForm
# success_url = reverse_lazy('feeds_list')
#
# def get_initial(self):
# source = self.request.GET.get('source')
# feed = self.request.GET.get('feed')
#
# if source == 'subtome' and feed:
# return {
# 'feed_url': feed
# }
#
# def get_form_kwargs(self, **kwargs):
# kwargs = super(FeedCreateView, self).get_form_kwargs(**kwargs)
# kwargs['user'] = self.request.user
# return kwargs
#
# def form_valid(self, form):
# obj = form.save(commit=False)
# obj.created_by = self.request.user
# return super(FeedCreateView, self).form_valid(form)
#
# class FeedUpdateView(LoginRequiredMixin, UpdateView):
# model = Feed
# form_class = FeedCreateForm
# success_url = '/feeds/edit/%(id)s/'
#
# def get_form_kwargs(self, **kwargs):
# kwargs = super(FeedUpdateView, self).get_form_kwargs(**kwargs)
# kwargs['user'] = self.request.user
# return kwargs
#
# def get_object(self, queryset=None):
# obj = super(FeedUpdateView, self).get_object()
#
# if obj.created_by == self.request.user:
# return obj
#
# def get(self, request, *args, **kwargs):
# response = super(FeedUpdateView, self).get(request, *args, **kwargs)
#
# if not self.object:
# return redirect(reverse_lazy('feeds_list'))
#
# return response
#
# class FeedDeleteView(LoginRequiredMixin, DeleteView):
# model = Feed
# success_url = reverse_lazy('feeds_list')
#
# def get_object(self, queryset=None):
# obj = super(FeedDeleteView, self).get_object()
#
# if obj.created_by == self.request.user:
# return obj
. Output only the next line. | ) |
Continue the code snippet: <|code_start|>
urlpatterns = patterns(
'',
url(r'^$', FeedListView.as_view(), name='feeds_list'),
url(r'^new/$', FeedCreateView.as_view(), name='feeds_create'),
url(r'^edit/(?P<pk>[\w]+)/$', FeedUpdateView.as_view(), name='feeds_update'),
url(r'^delete/(?P<pk>[\w]+)/$', FeedDeleteView.as_view(), name='feeds_delete'),
<|code_end|>
. Use current file imports:
from django.conf.urls import patterns, url
from .views import FeedListView, FeedCreateView, FeedUpdateView, FeedDeleteView
and context (classes, functions, or code) from other files:
# Path: apps/feeds/views.py
# class FeedListView(LoginRequiredMixin, ListView):
# model = Feed
#
# def get_queryset(self):
# return Feed.objects.filter(created_by=self.request.user)
#
# class FeedCreateView(LoginRequiredMixin, CreateView):
# model = Feed
# form_class = FeedCreateForm
# success_url = reverse_lazy('feeds_list')
#
# def get_initial(self):
# source = self.request.GET.get('source')
# feed = self.request.GET.get('feed')
#
# if source == 'subtome' and feed:
# return {
# 'feed_url': feed
# }
#
# def get_form_kwargs(self, **kwargs):
# kwargs = super(FeedCreateView, self).get_form_kwargs(**kwargs)
# kwargs['user'] = self.request.user
# return kwargs
#
# def form_valid(self, form):
# obj = form.save(commit=False)
# obj.created_by = self.request.user
# return super(FeedCreateView, self).form_valid(form)
#
# class FeedUpdateView(LoginRequiredMixin, UpdateView):
# model = Feed
# form_class = FeedCreateForm
# success_url = '/feeds/edit/%(id)s/'
#
# def get_form_kwargs(self, **kwargs):
# kwargs = super(FeedUpdateView, self).get_form_kwargs(**kwargs)
# kwargs['user'] = self.request.user
# return kwargs
#
# def get_object(self, queryset=None):
# obj = super(FeedUpdateView, self).get_object()
#
# if obj.created_by == self.request.user:
# return obj
#
# def get(self, request, *args, **kwargs):
# response = super(FeedUpdateView, self).get(request, *args, **kwargs)
#
# if not self.object:
# return redirect(reverse_lazy('feeds_list'))
#
# return response
#
# class FeedDeleteView(LoginRequiredMixin, DeleteView):
# model = Feed
# success_url = reverse_lazy('feeds_list')
#
# def get_object(self, queryset=None):
# obj = super(FeedDeleteView, self).get_object()
#
# if obj.created_by == self.request.user:
# return obj
. Output only the next line. | ) |
Next line prediction: <|code_start|>
class KipptUserBackend(ModelBackend):
def authenticate(self, username=None, api_token=None):
try:
return KipptUser.objects.get(username=username, api_token=api_token)
except KipptUser.DoesNotExist:
return None
<|code_end|>
. Use current file imports:
(from django.contrib.auth.backends import ModelBackend
from .models import KipptUser)
and context including class names, function names, or small code snippets from other files:
# Path: apps/auth/models.py
# class KipptUser(AbstractUser):
# """
# Defines our custom user model and fields
#
# """
# api_token = models.CharField(max_length=255)
# list_id = models.IntegerField(blank=True, null=True)
#
# def __unicode__(self):
# return self.username
#
# def kippt_client(self):
# return Kippt(self.username, api_token=self.api_token)
. Output only the next line. | def get_user(self, user_id): |
Predict the next line after this snippet: <|code_start|>
class KipptUserAdmin(UserAdmin):
list_display = (
'username', 'first_name', 'last_name', 'email',
'is_active', 'is_staff', 'is_superuser')
list_filter = ('is_staff', 'is_superuser', 'is_active')
search_fields = ['username', 'email', 'first_name', 'last_name']
def __init__(self, *args, **kwargs):
super(KipptUserAdmin, self).__init__(*args, **kwargs)
self.fieldsets += (
<|code_end|>
using the current file's imports:
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import KipptUser
and any relevant context from other files:
# Path: apps/auth/models.py
# class KipptUser(AbstractUser):
# """
# Defines our custom user model and fields
#
# """
# api_token = models.CharField(max_length=255)
# list_id = models.IntegerField(blank=True, null=True)
#
# def __unicode__(self):
# return self.username
#
# def kippt_client(self):
# return Kippt(self.username, api_token=self.api_token)
. Output only the next line. | ('Other info', { |
Here is a snippet: <|code_start|>
class ConnectView(CreateView):
model = KipptUser
form_class = KipptUserConnectForm
def get_context_data(self, **kwargs):
context = super(ConnectView, self).get_context_data(**kwargs)
feeds_create = self.request.build_absolute_uri(reverse('feeds_create'))
context['subtome_url'] = '{}?feed={{feed}}&source=subtome'.format(feeds_create)
return context
def get(self, request, *args, **kwargs):
if request.user.is_authenticated():
<|code_end|>
. Write the next line using the current file imports:
from django.views.generic import CreateView, UpdateView
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse_lazy, reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import KipptUser
from .forms import KipptUserConnectForm, KipptUserSetupForm
and context from other files:
# Path: apps/auth/models.py
# class KipptUser(AbstractUser):
# """
# Defines our custom user model and fields
#
# """
# api_token = models.CharField(max_length=255)
# list_id = models.IntegerField(blank=True, null=True)
#
# def __unicode__(self):
# return self.username
#
# def kippt_client(self):
# return Kippt(self.username, api_token=self.api_token)
#
# Path: apps/auth/forms.py
# class KipptUserConnectForm(forms.ModelForm):
#
# class Meta:
# model = KipptUser
# fields = ('username', 'password')
#
# def __init__(self, *args, **kwargs):
# super(KipptUserConnectForm, self).__init__(*args, **kwargs)
#
# self.fields['username'].label = 'Kippt Username'
# self.fields['username'].help_text = ''
# self.fields['password'].label = 'Kippt Password or API Token'
# self.fields['password'].widget = forms.PasswordInput()
#
# def clean(self):
# username = self.cleaned_data.get('username')
# password = self.cleaned_data.get('password')
#
# kippt = Kippt(username, password=password)
# user = kippt.account()
#
# if 'message' in user:
# kippt = Kippt(username, api_token=password)
# user = kippt.account()
#
# if 'message' in user:
# raise forms.ValidationError(user['message'])
#
# self.cleaned_data['api_token'] = user['api_token']
#
# return self.cleaned_data
#
# def save(self, commit=True):
# user, created = KipptUser.objects.get_or_create(
# username=self.cleaned_data['username'],
# )
#
# user.api_token = self.cleaned_data['api_token']
# user.save()
#
# if created:
# user.set_password(None)
#
# return user, created
#
# class KipptUserSetupForm(forms.ModelForm):
# list_id = forms.ChoiceField(label='Default list')
#
# class Meta:
# model = KipptUser
# fields = ('list_id',)
#
# def __init__(self, *args, **kwargs):
# super(KipptUserSetupForm, self).__init__(*args, **kwargs)
#
# kippt = self.instance.kippt_client()
# meta, lists = kippt.lists()
#
# LIST_CHOICES = [('', 'Choose a list to store feed items')]
#
# for kippt_list in lists:
# LIST_CHOICES.append((kippt_list['id'], kippt_list['title']))
#
# self.fields['list_id'].choices = LIST_CHOICES
, which may include functions, classes, or code. Output only the next line. | return redirect(reverse_lazy('feeds_list')) |
Using the snippet: <|code_start|>
class ConnectView(CreateView):
model = KipptUser
form_class = KipptUserConnectForm
def get_context_data(self, **kwargs):
context = super(ConnectView, self).get_context_data(**kwargs)
feeds_create = self.request.build_absolute_uri(reverse('feeds_create'))
context['subtome_url'] = '{}?feed={{feed}}&source=subtome'.format(feeds_create)
return context
def get(self, request, *args, **kwargs):
if request.user.is_authenticated():
return redirect(reverse_lazy('feeds_list'))
return super(ConnectView, self).get(request, *args, **kwargs)
<|code_end|>
, determine the next line of code. You have imports:
from django.views.generic import CreateView, UpdateView
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse_lazy, reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import KipptUser
from .forms import KipptUserConnectForm, KipptUserSetupForm
and context (class names, function names, or code) available:
# Path: apps/auth/models.py
# class KipptUser(AbstractUser):
# """
# Defines our custom user model and fields
#
# """
# api_token = models.CharField(max_length=255)
# list_id = models.IntegerField(blank=True, null=True)
#
# def __unicode__(self):
# return self.username
#
# def kippt_client(self):
# return Kippt(self.username, api_token=self.api_token)
#
# Path: apps/auth/forms.py
# class KipptUserConnectForm(forms.ModelForm):
#
# class Meta:
# model = KipptUser
# fields = ('username', 'password')
#
# def __init__(self, *args, **kwargs):
# super(KipptUserConnectForm, self).__init__(*args, **kwargs)
#
# self.fields['username'].label = 'Kippt Username'
# self.fields['username'].help_text = ''
# self.fields['password'].label = 'Kippt Password or API Token'
# self.fields['password'].widget = forms.PasswordInput()
#
# def clean(self):
# username = self.cleaned_data.get('username')
# password = self.cleaned_data.get('password')
#
# kippt = Kippt(username, password=password)
# user = kippt.account()
#
# if 'message' in user:
# kippt = Kippt(username, api_token=password)
# user = kippt.account()
#
# if 'message' in user:
# raise forms.ValidationError(user['message'])
#
# self.cleaned_data['api_token'] = user['api_token']
#
# return self.cleaned_data
#
# def save(self, commit=True):
# user, created = KipptUser.objects.get_or_create(
# username=self.cleaned_data['username'],
# )
#
# user.api_token = self.cleaned_data['api_token']
# user.save()
#
# if created:
# user.set_password(None)
#
# return user, created
#
# class KipptUserSetupForm(forms.ModelForm):
# list_id = forms.ChoiceField(label='Default list')
#
# class Meta:
# model = KipptUser
# fields = ('list_id',)
#
# def __init__(self, *args, **kwargs):
# super(KipptUserSetupForm, self).__init__(*args, **kwargs)
#
# kippt = self.instance.kippt_client()
# meta, lists = kippt.lists()
#
# LIST_CHOICES = [('', 'Choose a list to store feed items')]
#
# for kippt_list in lists:
# LIST_CHOICES.append((kippt_list['id'], kippt_list['title']))
#
# self.fields['list_id'].choices = LIST_CHOICES
. Output only the next line. | def form_valid(self, form): |
Predict the next line after this snippet: <|code_start|>class ConnectView(CreateView):
model = KipptUser
form_class = KipptUserConnectForm
def get_context_data(self, **kwargs):
context = super(ConnectView, self).get_context_data(**kwargs)
feeds_create = self.request.build_absolute_uri(reverse('feeds_create'))
context['subtome_url'] = '{}?feed={{feed}}&source=subtome'.format(feeds_create)
return context
def get(self, request, *args, **kwargs):
if request.user.is_authenticated():
return redirect(reverse_lazy('feeds_list'))
return super(ConnectView, self).get(request, *args, **kwargs)
def form_valid(self, form):
obj, created = form.save()
user = authenticate(username=obj.username, api_token=obj.api_token)
if user is not None:
login(self.request, user)
redirect_to = self.request.GET.get('next')
if created:
redirect_to = reverse_lazy('auth_setup')
elif not redirect_to:
redirect_to = reverse_lazy('feeds_list')
<|code_end|>
using the current file's imports:
from django.views.generic import CreateView, UpdateView
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse_lazy, reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import KipptUser
from .forms import KipptUserConnectForm, KipptUserSetupForm
and any relevant context from other files:
# Path: apps/auth/models.py
# class KipptUser(AbstractUser):
# """
# Defines our custom user model and fields
#
# """
# api_token = models.CharField(max_length=255)
# list_id = models.IntegerField(blank=True, null=True)
#
# def __unicode__(self):
# return self.username
#
# def kippt_client(self):
# return Kippt(self.username, api_token=self.api_token)
#
# Path: apps/auth/forms.py
# class KipptUserConnectForm(forms.ModelForm):
#
# class Meta:
# model = KipptUser
# fields = ('username', 'password')
#
# def __init__(self, *args, **kwargs):
# super(KipptUserConnectForm, self).__init__(*args, **kwargs)
#
# self.fields['username'].label = 'Kippt Username'
# self.fields['username'].help_text = ''
# self.fields['password'].label = 'Kippt Password or API Token'
# self.fields['password'].widget = forms.PasswordInput()
#
# def clean(self):
# username = self.cleaned_data.get('username')
# password = self.cleaned_data.get('password')
#
# kippt = Kippt(username, password=password)
# user = kippt.account()
#
# if 'message' in user:
# kippt = Kippt(username, api_token=password)
# user = kippt.account()
#
# if 'message' in user:
# raise forms.ValidationError(user['message'])
#
# self.cleaned_data['api_token'] = user['api_token']
#
# return self.cleaned_data
#
# def save(self, commit=True):
# user, created = KipptUser.objects.get_or_create(
# username=self.cleaned_data['username'],
# )
#
# user.api_token = self.cleaned_data['api_token']
# user.save()
#
# if created:
# user.set_password(None)
#
# return user, created
#
# class KipptUserSetupForm(forms.ModelForm):
# list_id = forms.ChoiceField(label='Default list')
#
# class Meta:
# model = KipptUser
# fields = ('list_id',)
#
# def __init__(self, *args, **kwargs):
# super(KipptUserSetupForm, self).__init__(*args, **kwargs)
#
# kippt = self.instance.kippt_client()
# meta, lists = kippt.lists()
#
# LIST_CHOICES = [('', 'Choose a list to store feed items')]
#
# for kippt_list in lists:
# LIST_CHOICES.append((kippt_list['id'], kippt_list['title']))
#
# self.fields['list_id'].choices = LIST_CHOICES
. Output only the next line. | return redirect(redirect_to) |
Next line prediction: <|code_start|>
class FeedAdmin(admin.ModelAdmin):
list_display = ('feed_url', 'created_by')
class FeedEntryAdmin(admin.ModelAdmin):
<|code_end|>
. Use current file imports:
(from django.contrib import admin
from .models import Feed, FeedEntry)
and context including class names, function names, or small code snippets from other files:
# Path: apps/feeds/models.py
# class Feed(models.Model):
# feed_url = models.URLField()
# list_id = models.IntegerField(blank=True, null=True)
# created_by = models.ForeignKey(settings.AUTH_USER_MODEL)
#
# date_created = models.DateTimeField(auto_now_add=True)
# date_modified = models.DateTimeField(auto_now=True)
#
# class Meta:
# unique_together = ('feed_url', 'created_by')
#
# def __unicode__(self):
# return self.feed_url
#
# class FeedEntry(models.Model):
# title = models.CharField(max_length=255, blank=True)
# summary = models.TextField(blank=True)
# link = models.URLField()
# feed = models.ForeignKey(Feed)
#
# added_to_kippt = models.BooleanField(default=False)
# date_added_to_kippt = models.DateTimeField(blank=True, null=True)
#
# date_created = models.DateTimeField(auto_now_add=True)
# date_modified = models.DateTimeField(auto_now=True)
#
# class Meta:
# verbose_name_plural = 'feed entries'
#
# def __unicode__(self):
# return self.title
. Output only the next line. | list_display = ('title', 'link', 'feed', |
Given the following code snippet before the placeholder: <|code_start|>
class FeedAdmin(admin.ModelAdmin):
list_display = ('feed_url', 'created_by')
class FeedEntryAdmin(admin.ModelAdmin):
<|code_end|>
, predict the next line using imports from the current file:
from django.contrib import admin
from .models import Feed, FeedEntry
and context including class names, function names, and sometimes code from other files:
# Path: apps/feeds/models.py
# class Feed(models.Model):
# feed_url = models.URLField()
# list_id = models.IntegerField(blank=True, null=True)
# created_by = models.ForeignKey(settings.AUTH_USER_MODEL)
#
# date_created = models.DateTimeField(auto_now_add=True)
# date_modified = models.DateTimeField(auto_now=True)
#
# class Meta:
# unique_together = ('feed_url', 'created_by')
#
# def __unicode__(self):
# return self.feed_url
#
# class FeedEntry(models.Model):
# title = models.CharField(max_length=255, blank=True)
# summary = models.TextField(blank=True)
# link = models.URLField()
# feed = models.ForeignKey(Feed)
#
# added_to_kippt = models.BooleanField(default=False)
# date_added_to_kippt = models.DateTimeField(blank=True, null=True)
#
# date_created = models.DateTimeField(auto_now_add=True)
# date_modified = models.DateTimeField(auto_now=True)
#
# class Meta:
# verbose_name_plural = 'feed entries'
#
# def __unicode__(self):
# return self.title
. Output only the next line. | list_display = ('title', 'link', 'feed', |
Using the snippet: <|code_start|>
class FeedCreateForm(forms.ModelForm):
list_id = forms.ChoiceField(label='List (optional)', required=False)
class Meta:
model = Feed
fields = ('feed_url', 'list_id')
def __init__(self, user, *args, **kwargs):
super(FeedCreateForm, self).__init__(*args, **kwargs)
self.user = user
kippt = user.kippt_client()
meta, lists = kippt.lists()
LIST_CHOICES = [('', 'Choose a list to store feed items')]
for kippt_list in lists:
LIST_CHOICES.append((kippt_list['id'], kippt_list['title']))
self.fields['list_id'].choices = LIST_CHOICES
def clean_feed_url(self):
feed_url = self.cleaned_data.get('feed_url')
<|code_end|>
, determine the next line of code. You have imports:
from django import forms
from django.conf import settings
from djpubsubhubbub.models import Subscription
from .models import Feed
import feedparser
and context (class names, function names, or code) available:
# Path: apps/feeds/models.py
# class Feed(models.Model):
# feed_url = models.URLField()
# list_id = models.IntegerField(blank=True, null=True)
# created_by = models.ForeignKey(settings.AUTH_USER_MODEL)
#
# date_created = models.DateTimeField(auto_now_add=True)
# date_modified = models.DateTimeField(auto_now=True)
#
# class Meta:
# unique_together = ('feed_url', 'created_by')
#
# def __unicode__(self):
# return self.feed_url
. Output only the next line. | feed = feedparser.parse(feed_url) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.