Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Here is a snippet: <|code_start|>
class HomeView(View):
http_method_names = [u'get']
def get(self, request):
response = render(request, 'home.html')
return response
def renderAdvertisements(request):
queryset = Product.objects.all()
<|code_end|>
. Write the next line using the current file imports:
from django.shortcuts import render
from django.views.generic import View
from products.models import Product
and context from other files:
# Path: products/models.py
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
, which may include functions, classes, or code. Output only the next line. | context = { |
Given the code snippet: <|code_start|>
class CartManagement(View):
def addToCart(request, product_id, quantity):
product = Product.objects.get(id=product_id)
cart = Cart(request)
cart.add(product, product.price, quantity)
@csrf_exempt
def removeFromCart(request, product_id):
product = Product.objects.get(id=product_id)
cart = Cart(request)
cart.remove(product)
def cartDetailView(request):
return render_to_response('cartDetail.html', dict(cart=Cart(request)))
@csrf_exempt
def removeFromCartView(request, product_id):
<|code_end|>
, generate the next line using the imports in this file:
from cart.cart import Cart
from django.shortcuts import redirect
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from products.models import Product
and context (functions, classes, or occasionally code) from other files:
# Path: cart/cart.py
# class Cart:
# def __init__(self, request):
# cart_id = request.session.get(CART_ID)
# if cart_id:
# try:
# cart = models.Cart.objects.get(id=cart_id, checked_out=False)
# except models.Cart.DoesNotExist:
# cart = self.new(request)
# else:
# cart = self.new(request)
# self.cart = cart
#
# def __iter__(self):
# for item in self.cart.item_set.all():
# yield item
#
# def new(self, request):
# cart = models.Cart(creation_date=datetime.datetime.now())
# cart.save()
# request.session[CART_ID] = cart.id
# return cart
#
# def add(self, product, unit_price, quantity=1):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# item = models.Item()
# item.cart = self.cart
# item.product = product
# item.unit_price = unit_price
# item.quantity = quantity
# item.save()
# else:
# item.unit_price = unit_price
# item.quantity += int(quantity)
# item.save()
#
# def remove(self, product):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# raise ItemDoesNotExist
# else:
# item.delete()
#
# def update(self, product, quantity, unit_price=None):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# raise ItemDoesNotExist
# else:
# if quantity == 0:
# item.delete()
# else:
# item.unit_price = unit_price
# item.quantity = int(quantity)
# item.save()
#
# def count(self):
# result = 0
# for item in self.cart.item_set.all():
# result += 1 * item.quantity
# return result
#
# def summary(self):
# result = 0
# for item in self.cart.item_set.all():
# result += item.total_price
# return result
#
# def clear(self):
# for item in self.cart.item_set.all():
# item.delete()
#
# Path: products/models.py
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
. Output only the next line. | CartManagement.removeFromCart(request, product_id) |
Predict the next line after this snippet: <|code_start|>
class CartManagement(View):
def addToCart(request, product_id, quantity):
product = Product.objects.get(id=product_id)
cart = Cart(request)
cart.add(product, product.price, quantity)
@csrf_exempt
def removeFromCart(request, product_id):
product = Product.objects.get(id=product_id)
cart = Cart(request)
cart.remove(product)
def cartDetailView(request):
return render_to_response('cartDetail.html', dict(cart=Cart(request)))
@csrf_exempt
def removeFromCartView(request, product_id):
<|code_end|>
using the current file's imports:
from cart.cart import Cart
from django.shortcuts import redirect
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
from products.models import Product
and any relevant context from other files:
# Path: cart/cart.py
# class Cart:
# def __init__(self, request):
# cart_id = request.session.get(CART_ID)
# if cart_id:
# try:
# cart = models.Cart.objects.get(id=cart_id, checked_out=False)
# except models.Cart.DoesNotExist:
# cart = self.new(request)
# else:
# cart = self.new(request)
# self.cart = cart
#
# def __iter__(self):
# for item in self.cart.item_set.all():
# yield item
#
# def new(self, request):
# cart = models.Cart(creation_date=datetime.datetime.now())
# cart.save()
# request.session[CART_ID] = cart.id
# return cart
#
# def add(self, product, unit_price, quantity=1):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# item = models.Item()
# item.cart = self.cart
# item.product = product
# item.unit_price = unit_price
# item.quantity = quantity
# item.save()
# else:
# item.unit_price = unit_price
# item.quantity += int(quantity)
# item.save()
#
# def remove(self, product):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# raise ItemDoesNotExist
# else:
# item.delete()
#
# def update(self, product, quantity, unit_price=None):
# try:
# item = models.Item.objects.get(
# cart=self.cart,
# product=product,
# )
# except models.Item.DoesNotExist:
# raise ItemDoesNotExist
# else:
# if quantity == 0:
# item.delete()
# else:
# item.unit_price = unit_price
# item.quantity = int(quantity)
# item.save()
#
# def count(self):
# result = 0
# for item in self.cart.item_set.all():
# result += 1 * item.quantity
# return result
#
# def summary(self):
# result = 0
# for item in self.cart.item_set.all():
# result += item.total_price
# return result
#
# def clear(self):
# for item in self.cart.item_set.all():
# item.delete()
#
# Path: products/models.py
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
. Output only the next line. | CartManagement.removeFromCart(request, product_id) |
Given snippet: <|code_start|>
class DefaultPermission(permissions.IsAuthenticated):
def has_permission(self, request, view):
permission = super().has_permission(request, view)
try:
if permission and request.user.is_active:
permission = True
except ObjectDoesNotExist:
permission = False
return permission
class CustomerUserPermissions(permissions.BasePermission):
def __init__(self):
self.permission = False
self.request = None
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from rest_framework import permissions
from django.core.exceptions import ObjectDoesNotExist
from .models import CreditCard, ShippingAddress
and context:
# Path: users/models.py
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
#
# class ShippingAddress(models.Model):
# """docstring for ShippingAddress"""
# customer = models.ForeignKey(CustomerUser)
# country = models.CharField(
# help_text=_(
# "Preencha com o nome completo do país"
# " onde esse endereço se encontra."),
# verbose_name=_("País"),
# max_length=50)
# state = models.CharField(
# help_text=_("Estado ou província onde esse estado se encontra."),
# verbose_name=_("Estado"),
# max_length=50)
# city = models.CharField(
# help_text=_("Cidade onde esse endereço se encontra."),
# verbose_name=_("Cidade"),
# max_length=50)
# zip_code = models.CharField(
# help_text=_("Código de Endereço Postal"),
# verbose_name=_("CEP"),
# max_length=10, null=True, blank=True)
# address = models.CharField(
# help_text=_("Endereço Postal"),
# verbose_name=_("Endereço"),
# max_length=256)
# reference = models.CharField(
# help_text=_(
# "Ponto de Referência nas redondezas."
# " Ex: 'Ao lado da Farmécia'"),
# verbose_name=_("Ponto de Referência."),
# max_length=256, null=True, blank=True)
#
# class Meta:
# verbose_name = _('Endereço para Envio')
# verbose_name_plural = _('Endereços para Envio')
# ordering = ('country', 'state', 'city', 'zip_code')
#
# def __str__(self):
# return self.zip_code + ' - ' + self.address
which might include code, classes, or functions. Output only the next line. | self.user_id = '' |
Given snippet: <|code_start|> def has_permission(self, request, view):
if request.user.is_anonymous:
return False
elif request.user.is_superuser:
return True
elif 'credit_cards' in request.path:
self.request = request
self.user_id = str(self.request.user.id)
self.card_id = self.request.path.split('/credit_cards/')[1][:-1]
if self.request.method == 'POST':
self.permission = True
try:
card = CreditCard.objects.filter(pk=int(self.card_id))
self.card_user_id = str(card[0].user.pk)
except:
pass
if self.user_id == self.card_user_id:
self.authorized_user = True
if self.authorized_user:
self.permission = True
elif 'shipping_addresses' in request.path:
self.request = request
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from rest_framework import permissions
from django.core.exceptions import ObjectDoesNotExist
from .models import CreditCard, ShippingAddress
and context:
# Path: users/models.py
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
#
# class ShippingAddress(models.Model):
# """docstring for ShippingAddress"""
# customer = models.ForeignKey(CustomerUser)
# country = models.CharField(
# help_text=_(
# "Preencha com o nome completo do país"
# " onde esse endereço se encontra."),
# verbose_name=_("País"),
# max_length=50)
# state = models.CharField(
# help_text=_("Estado ou província onde esse estado se encontra."),
# verbose_name=_("Estado"),
# max_length=50)
# city = models.CharField(
# help_text=_("Cidade onde esse endereço se encontra."),
# verbose_name=_("Cidade"),
# max_length=50)
# zip_code = models.CharField(
# help_text=_("Código de Endereço Postal"),
# verbose_name=_("CEP"),
# max_length=10, null=True, blank=True)
# address = models.CharField(
# help_text=_("Endereço Postal"),
# verbose_name=_("Endereço"),
# max_length=256)
# reference = models.CharField(
# help_text=_(
# "Ponto de Referência nas redondezas."
# " Ex: 'Ao lado da Farmécia'"),
# verbose_name=_("Ponto de Referência."),
# max_length=256, null=True, blank=True)
#
# class Meta:
# verbose_name = _('Endereço para Envio')
# verbose_name_plural = _('Endereços para Envio')
# ordering = ('country', 'state', 'city', 'zip_code')
#
# def __str__(self):
# return self.zip_code + ' - ' + self.address
which might include code, classes, or functions. Output only the next line. | self.user_id = str(self.request.user.id) |
Here is a snippet: <|code_start|>
def productIndexView(request):
queryset = Product.objects.all()
context = {
"products": queryset,
}
return render(request, "productIndex.html", context)
<|code_end|>
. Write the next line using the current file imports:
from cart.views import CartManagement
from django.http import Http404
from django.shortcuts import redirect, render
from .models import (Product, ProductCategory)
and context from other files:
# Path: cart/views.py
# class CartManagement(View):
#
# def addToCart(request, product_id, quantity):
# product = Product.objects.get(id=product_id)
# cart = Cart(request)
# cart.add(product, product.price, quantity)
#
# @csrf_exempt
# def removeFromCart(request, product_id):
# product = Product.objects.get(id=product_id)
# cart = Cart(request)
# cart.remove(product)
#
# Path: products/models.py
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
#
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
, which may include functions, classes, or code. Output only the next line. | def productDetailView(request, product_id): |
Continue the code snippet: <|code_start|>
def productIndexView(request):
queryset = Product.objects.all()
context = {
"products": queryset,
}
return render(request, "productIndex.html", context)
def productDetailView(request, product_id):
try:
product = Product.objects.get(pk=product_id)
except Product.DoesNotExist:
raise Http404("The product does not exist")
return render(request, "productDetail.html", {'product': product})
def productFilterView(request):
products = Product.objects.all()
var_get_search = request.GET.get('search_product')
<|code_end|>
. Use current file imports:
from cart.views import CartManagement
from django.http import Http404
from django.shortcuts import redirect, render
from .models import (Product, ProductCategory)
and context (classes, functions, or code) from other files:
# Path: cart/views.py
# class CartManagement(View):
#
# def addToCart(request, product_id, quantity):
# product = Product.objects.get(id=product_id)
# cart = Cart(request)
# cart.add(product, product.price, quantity)
#
# @csrf_exempt
# def removeFromCart(request, product_id):
# product = Product.objects.get(id=product_id)
# cart = Cart(request)
# cart.remove(product)
#
# Path: products/models.py
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
#
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
. Output only the next line. | if var_get_search is not None: |
Given snippet: <|code_start|>
class ProductSerializerDefault(serializers.ModelSerializer):
class Meta:
model = Product
fields = "__all__"
class ProductSerializerPOST(serializers.ModelSerializer):
class Meta:
model = Product
fields = ('pk', 'product_name', 'category_id', 'stock_quantity',
'price', 'weight', 'width', 'height', 'PRODUCT_TYPES',
'product_type', 'illustration')
def create(self, validated_data):
obj = Product.objects.create(**validated_data)
return obj
class ProductCategorySerializerDefault(serializers.ModelSerializer):
class Meta:
model = ProductCategory
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from rest_framework import serializers
from .models import (
ProductCategory,
Product
)
and context:
# Path: products/models.py
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
#
# class Product(models.Model):
# """docstring for Products."""
#
# product_name = models.CharField(
# help_text=_("Nome completo do Produto"),
# verbose_name=_("Nome do produto"),
# max_length=100, null=False, blank=False
# )
# category_id = models.ForeignKey(ProductCategory)
# stock_quantity = models.PositiveIntegerField(
# help_text=_("Quantidade do produto em estoque"),
# verbose_name=_("Quantidade do produto"),
# null=False, blank=False
# )
# price = models.FloatField(
# help_text=_("Preço atual do produto"),
# verbose_name=_("Preço do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# weight = models.FloatField(
# default=0,
# help_text=_("Peso do produto atual"),
# verbose_name=_("Peso do produto"),
# null=False, blank=False,
# validators=[MinValueValidator(0.1)],
# )
# width = models.PositiveIntegerField(
# default=500,
# help_text=_("Largura da Imagem"),
# verbose_name=_("Largura da Imagem")
# )
# height = models.PositiveIntegerField(
# default=500,
# help_text=_("Altura da Imagem"),
# verbose_name=_("Altura da Imagem")
# )
# PRODUCT_TYPES = (
# ('PAD', 'Anuncio'), # PAD, Product Advertising
# ('STD', 'Padrao'), # STD, Standard Product
# )
# product_type = models.CharField(default='Padrao', max_length=3,
# choices=PRODUCT_TYPES
# )
# illustration = models.ImageField(null=False, blank=False,
# width_field='width',
# height_field='height',
# help_text=_("Ilustração"),
# verbose_name=_("Imagem"),
# )
#
# class Meta:
# verbose_name = _('Produto')
# verbose_name_plural = ('Produtos')
# ordering = ('product_name', 'category_id', 'stock_quantity', 'price')
#
# def save(self, *args, **kwargs):
# image = Image.open(self.illustration)
# output = BytesIO()
#
# image = image.resize((500, 500))
# image.save(output, format='PNG', quality=100)
# output.seek(0)
#
# self.illustration = InMemoryUploadedFile(output, 'ImageField',
# "%s.png" % self
# .illustration
# .name.split('.')[0],
# 'image/jpeg',
# sys.getsizeof(output),
# None)
#
# super(Product, self).save(*args, **kwargs)
#
# def __str__(self):
# return self.product_name
which might include code, classes, or functions. Output only the next line. | fields = "__all__" |
Using the snippet: <|code_start|># Remenber to use test_TESTNAME.py
@pytest.mark.django_db
def test_customer_user():
user = CustomerUser()
<|code_end|>
, determine the next line of code. You have imports:
import pytest
from products.models import ProductCategory
from cart.models import (
Cart,
# ItemManager,
# Item
)
from users.models import (CustomerUser, CreditCard)
and context (class names, function names, or code) available:
# Path: products/models.py
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
#
# Path: cart/models.py
# class Cart(models.Model):
# creation_date = models.DateTimeField(default=datetime.now,
# verbose_name=_('creation date'))
# checked_out = models.BooleanField(default=False,
# verbose_name=_('checked out'))
#
# class Meta:
# verbose_name = _('cart')
# verbose_name_plural = _('carts')
# ordering = ('-creation_date',)
#
# def __str__(self):
# return str(self.creation_date)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
#
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
. Output only the next line. | user.name = 'Gabriela' |
Given the code snippet: <|code_start|>#
# category.save()
#
# product = Product()
#
# product.product_name = "Metcon"
# product.category_id = category
# product.stock_quantity = 10
# product.price = 459
# product.weight = 10
# product.width = 20
# product.height = 10
# product.product_type = "Padrao"
#
# product.save()
#
# products = Product.objects.all().count()
#
# assert products >= 1
# product.delete()
@pytest.mark.django_db
def test_cart():
cart = Cart()
cart.creation_date = '2010-02-12'
cart.checked_out = False
<|code_end|>
, generate the next line using the imports in this file:
import pytest
from products.models import ProductCategory
from cart.models import (
Cart,
# ItemManager,
# Item
)
from users.models import (CustomerUser, CreditCard)
and context (functions, classes, or occasionally code) from other files:
# Path: products/models.py
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
#
# Path: cart/models.py
# class Cart(models.Model):
# creation_date = models.DateTimeField(default=datetime.now,
# verbose_name=_('creation date'))
# checked_out = models.BooleanField(default=False,
# verbose_name=_('checked out'))
#
# class Meta:
# verbose_name = _('cart')
# verbose_name_plural = _('carts')
# ordering = ('-creation_date',)
#
# def __str__(self):
# return str(self.creation_date)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
#
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
. Output only the next line. | cart.save() |
Given snippet: <|code_start|># Remenber to use test_TESTNAME.py
@pytest.mark.django_db
def test_customer_user():
user = CustomerUser()
user.name = 'Gabriela'
user.last_name = 'Gama'
user.set_password('123456')
user.email = 'gaby@mail.com'
user.phone_number = '98765432'
user.cellphone = '123454657'
user.save()
count_users = CustomerUser.objects.filter(pk=user.pk).count()
assert count_users >= 1
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import pytest
from products.models import ProductCategory
from cart.models import (
Cart,
# ItemManager,
# Item
)
from users.models import (CustomerUser, CreditCard)
and context:
# Path: products/models.py
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
#
# Path: cart/models.py
# class Cart(models.Model):
# creation_date = models.DateTimeField(default=datetime.now,
# verbose_name=_('creation date'))
# checked_out = models.BooleanField(default=False,
# verbose_name=_('checked out'))
#
# class Meta:
# verbose_name = _('cart')
# verbose_name_plural = _('carts')
# ordering = ('-creation_date',)
#
# def __str__(self):
# return str(self.creation_date)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
#
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
which might include code, classes, or functions. Output only the next line. | user.delete() |
Predict the next line after this snippet: <|code_start|># Remenber to use test_TESTNAME.py
@pytest.mark.django_db
def test_customer_user():
user = CustomerUser()
user.name = 'Gabriela'
<|code_end|>
using the current file's imports:
import pytest
from products.models import ProductCategory
from cart.models import (
Cart,
# ItemManager,
# Item
)
from users.models import (CustomerUser, CreditCard)
and any relevant context from other files:
# Path: products/models.py
# class ProductCategory(models.Model):
# """docstring for Category."""
#
# category_name = models.CharField(
# help_text=_("Nome da categoria de produtos"),
# verbose_name=_("Categoria de Produto"),
# max_length=100, null=False, blank=False
# )
# father_category = models.ForeignKey(
# 'self',
# null=True,
# blank=True
# )
#
# class Meta:
# verbose_name = _('Categoria de Produtos')
# verbose_name_plural = ('Categorias de produtos')
#
# def __str__(self):
# return self.category_name
#
# Path: cart/models.py
# class Cart(models.Model):
# creation_date = models.DateTimeField(default=datetime.now,
# verbose_name=_('creation date'))
# checked_out = models.BooleanField(default=False,
# verbose_name=_('checked out'))
#
# class Meta:
# verbose_name = _('cart')
# verbose_name_plural = _('carts')
# ordering = ('-creation_date',)
#
# def __str__(self):
# return str(self.creation_date)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
#
# class CreditCard(models.Model):
# """docstring for CreditCard"""
# user = models.ForeignKey(CustomerUser)
# owner_name = models.CharField(
# help_text=_("Preencha como está no " + credit_card_verbose_name),
# verbose_name=_("Nome do Titular do " + credit_card_verbose_name),
# max_length=256)
# card_number = models.CharField(
# verbose_name=_("Número do " + credit_card_verbose_name),
# max_length=16)
# security_code = models.CharField(
# verbose_name=_("Código de Segurança do " + credit_card_verbose_name),
# max_length=3)
# expire_date = models.DateField(
# verbose_name=_("Validade do " + credit_card_verbose_name))
# provider = models.CharField(
# verbose_name=_("Bandeira do " + credit_card_verbose_name),
# max_length=20)
#
# class Meta:
# verbose_name = _(credit_card_verbose_name)
# verbose_name_plural = _(credit_card_verbose_name_plural)
#
# def __str__(self):
# return ("************" + self.card_number[-4:])
. Output only the next line. | user.last_name = 'Gama' |
Continue the code snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
name='sign_up'),
url(r'^excluir_conta/$', CustomerUserDelectionView.as_view(),
name='excluir_conta'),
url(r'^(?P<id>\d+)/edit/$',
CustomerUserUpdateView.as_view(), name='edit'),
url(r'^(?P<pk>\d+)/$', CustomerUserDetailView.as_view(),
name='detail'),
url(r'^login/$', LoginView.as_view(),
name='login'),
url(r'^logout/$', LogoutView.as_view(), name='logout')
<|code_end|>
. Use current file imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and context (classes, functions, or code) from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | ] |
Predict the next line after this snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
name='sign_up'),
url(r'^excluir_conta/$', CustomerUserDelectionView.as_view(),
name='excluir_conta'),
url(r'^(?P<id>\d+)/edit/$',
CustomerUserUpdateView.as_view(), name='edit'),
url(r'^(?P<pk>\d+)/$', CustomerUserDetailView.as_view(),
name='detail'),
url(r'^login/$', LoginView.as_view(),
name='login'),
url(r'^logout/$', LogoutView.as_view(), name='logout')
<|code_end|>
using the current file's imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and any relevant context from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | ] |
Predict the next line after this snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
<|code_end|>
using the current file's imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and any relevant context from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | name='list_view'), |
Using the snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
<|code_end|>
, determine the next line of code. You have imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and context (class names, function names, or code) available:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | name='list_view'), |
Here is a snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
<|code_end|>
. Write the next line using the current file imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and context from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
, which may include functions, classes, or code. Output only the next line. | name='sign_up'), |
Given the code snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
name='sign_up'),
url(r'^excluir_conta/$', CustomerUserDelectionView.as_view(),
name='excluir_conta'),
url(r'^(?P<id>\d+)/edit/$',
CustomerUserUpdateView.as_view(), name='edit'),
url(r'^(?P<pk>\d+)/$', CustomerUserDetailView.as_view(),
name='detail'),
url(r'^login/$', LoginView.as_view(),
<|code_end|>
, generate the next line using the imports in this file:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and context (functions, classes, or occasionally code) from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | name='login'), |
Given snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
name='sign_up'),
url(r'^excluir_conta/$', CustomerUserDelectionView.as_view(),
name='excluir_conta'),
url(r'^(?P<id>\d+)/edit/$',
CustomerUserUpdateView.as_view(), name='edit'),
url(r'^(?P<pk>\d+)/$', CustomerUserDetailView.as_view(),
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and context:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
which might include code, classes, or functions. Output only the next line. | name='detail'), |
Predict the next line after this snippet: <|code_start|>
app_name = UsersConfig.name
urlpatterns = [
url(r'^$', CustomerUserListView.as_view(),
name='list_view'),
url(r'^sign_up/$', CustomerUserRegistrationView.as_view(),
name='sign_up'),
url(r'^excluir_conta/$', CustomerUserDelectionView.as_view(),
name='excluir_conta'),
url(r'^(?P<id>\d+)/edit/$',
CustomerUserUpdateView.as_view(), name='edit'),
url(r'^(?P<pk>\d+)/$', CustomerUserDetailView.as_view(),
name='detail'),
url(r'^login/$', LoginView.as_view(),
<|code_end|>
using the current file's imports:
from django.conf.urls import url
from .apps import UsersConfig
from .views import (CustomerUserRegistrationView,
CustomerUserDelectionView,
CustomerUserUpdateView,
CustomerUserListView,
CustomerUserDetailView,
LoginView,
LogoutView,)
and any relevant context from other files:
# Path: users/apps.py
# class UsersConfig(AppConfig):
# name = 'users'
# label = 'users'
# verbose_name = _('Usuário')
# verbose_name_plural = _('Usuários')
#
# Path: users/views.py
# class CustomerUserRegistrationView(FormView):
#
# """
# Class for CustomerUser registration view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserRegistrationForm()
# response = render(request, 'signup.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserRegistrationForm(request.POST)
#
# if form.is_valid():
# user = form.save(commit=False)
# password = form.cleaned_data['password']
# user.set_password(password)
# user.save()
#
# response = redirect("/")
# else:
# response = render(request, 'signup.html', {'form': form})
#
# return response
#
# class CustomerUserDelectionView(FormView):
#
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# form = CustomerUserDelectionForm()
# response = render(request, 'excluirConta.html', {'form': form})
# return response
#
# def post(self, request):
# form = CustomerUserDelectionForm(request.POST)
#
# if form.is_valid():
# data = {}
# data['password'] = request.POST['password']
#
# password = request.POST['password']
#
# # utilizando o authenticate do django
# user = authenticate(username=request.user.username,
# password=password)
# if user is not None:
# user = form.delete()
# user.delete()
# messages.sucess(request, 'Sua conta foi excluída')
# response = render(request, 'signup.html')
# return response
# else:
# pass
#
# else:
# response = render(request, 'excluirConta.html', {'form': form})
#
# return response
#
# class CustomerUserUpdateView(UpdateView):
# """
# Class for CustomerUser edit/update view implementation
# """
#
# model = CustomerUser
# slug_field = 'id'
# pk_url_kwarg = 'id'
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# def get_queryset(self):
# return CustomerUser.objects.all()
#
# def get(self, request, id):
# if request.user.id == int(id):
# instance = CustomerUser.objects.get(id=id)
# form = CustomerUserUpdateForm(request.POST or None,
# instance=instance)
#
# if form.is_valid():
# form.save()
# return redirect('/')
#
# response = render(request, 'edit.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# class CustomerUserListView(ListView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# context = super(CustomerUserListView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# context['context_object_name_plural'] = (
# CustomerUser._meta.verbose_name_plural)
# return context
#
# class CustomerUserDetailView(DetailView):
# model = CustomerUser
#
# def get_context_data(self, **kwargs):
#
# if self.request.user.id == kwargs['object'].id:
# context = super(CustomerUserDetailView,
# self).get_context_data(**kwargs)
# context['context_object_name'] = CustomerUser._meta.verbose_name
# else:
# context = redirect('/')
#
# return context
#
# class LoginView(FormView):
# """
# Class for CustomerUser login view.
# """
# http_method_names = [u'get', u'post']
#
# def get(self, request):
# if not request.user.is_authenticated:
# form = LoginForm()
# response = render(request, 'login.html', {'form': form})
# else:
# response = redirect('/')
#
# return response
#
# def post(self, request):
# form = LoginForm(request.POST)
#
# username = request.POST['username']
# password = request.POST['password']
# user = authenticate(request, username=username, password=password)
#
# if user is not None:
# login(request, user)
# response = redirect('/')
# else:
# messages.error(request, 'Nome de usuário e/ou senha incorreto(s).')
# response = render(request, 'login.html', {'form': form})
#
# return response
#
# class LogoutView(FormView):
# """
# Class for CustomerUser logout view.
# """
# http_method_names = [u'get']
#
# def get(self, request):
# if request.user.is_authenticated:
# logout(request)
# response = redirect('/')
# else:
# response = redirect('/')
#
# return response
. Output only the next line. | name='login'), |
Given snippet: <|code_start|>
def auth_view_decorator(function_decorator):
def simple_decorator(View):
View.dispatch = method_decorator(function_decorator)(View.dispatch)
return View
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.utils.decorators import method_decorator
from django.views.generic import DetailView, ListView
from django.views.generic.edit import FormView, UpdateView
from users.forms import (
CustomerUserDelectionForm,
CustomerUserRegistrationForm,
CustomerUserUpdateForm,
LoginForm)
from .models import CustomerUser
and context:
# Path: users/forms.py
# class CustomerUserDelectionForm(forms.ModelForm):
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['password']
#
# def clean(self):
# cleaned_data = super(CustomerUserDelectionForm, self).clean()
#
# # Password validation
# password = cleaned_data.get('password')
# password_validation = cleaned_data.get('password_validation')
#
# password_error_message = _("A senha deve ser igual à confirmação \
# de senha.")
# if password and password_validation:
# if password != password_validation:
# password_error = forms.ValidationError(password_error_message)
# self.add_error('password', password_error)
#
# return cleaned_data
#
# class CustomerUserRegistrationForm(forms.ModelForm):
# """
# Class for CustomerUser registration form.
# """
# username = forms.CharField(label=_("Nome de usuário"))
# first_name = forms.CharField(label=_("Nome"))
# last_name = forms.CharField(label=_("Sobrenome"))
# email = forms.CharField(label=_("E-mail"), validators=[validate_email])
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['username', 'first_name', 'last_name', 'email', 'password',
# 'cellphone', 'phone_number']
#
# def clean(self):
# cleaned_data = super(CustomerUserRegistrationForm, self).clean()
#
# # Password validation
# password = cleaned_data.get('password')
# password_validation = cleaned_data.get('password_validation')
#
# password_error_message = _("A senha deve ser igual à confirmação \
# de senha.")
# if password and password_validation:
# if password != password_validation:
# password_error = forms.ValidationError(password_error_message)
# self.add_error('password', password_error)
#
# # Username UNIQUE constraint validation
# username = cleaned_data.get('username')
#
# username_error_message = _("Este nome de usuário não está disponível.")
# if CustomerUser.objects.filter(username=username).exists():
# raise ValidationError(username_error_message)
#
# return cleaned_data
#
# class CustomerUserUpdateForm(forms.ModelForm):
# """
# class for CustomerUser update form
# """
#
# username = forms.CharField(label=_("Nome de usuário"))
# first_name = forms.CharField(label=_("Nome"))
# last_name = forms.CharField(label=_("Sobrenome"))
# email = forms.CharField(label=_("E-mail"), validators=[validate_email])
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# class LoginForm(forms.Form):
# """
# Class for Costumer User Login form
# """
#
# username = forms.CharField(label=_("Nome de usuário"))
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
which might include code, classes, or functions. Output only the next line. | return simple_decorator |
Continue the code snippet: <|code_start|>
def auth_view_decorator(function_decorator):
def simple_decorator(View):
View.dispatch = method_decorator(function_decorator)(View.dispatch)
return View
return simple_decorator
<|code_end|>
. Use current file imports:
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.utils.decorators import method_decorator
from django.views.generic import DetailView, ListView
from django.views.generic.edit import FormView, UpdateView
from users.forms import (
CustomerUserDelectionForm,
CustomerUserRegistrationForm,
CustomerUserUpdateForm,
LoginForm)
from .models import CustomerUser
and context (classes, functions, or code) from other files:
# Path: users/forms.py
# class CustomerUserDelectionForm(forms.ModelForm):
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['password']
#
# def clean(self):
# cleaned_data = super(CustomerUserDelectionForm, self).clean()
#
# # Password validation
# password = cleaned_data.get('password')
# password_validation = cleaned_data.get('password_validation')
#
# password_error_message = _("A senha deve ser igual à confirmação \
# de senha.")
# if password and password_validation:
# if password != password_validation:
# password_error = forms.ValidationError(password_error_message)
# self.add_error('password', password_error)
#
# return cleaned_data
#
# class CustomerUserRegistrationForm(forms.ModelForm):
# """
# Class for CustomerUser registration form.
# """
# username = forms.CharField(label=_("Nome de usuário"))
# first_name = forms.CharField(label=_("Nome"))
# last_name = forms.CharField(label=_("Sobrenome"))
# email = forms.CharField(label=_("E-mail"), validators=[validate_email])
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['username', 'first_name', 'last_name', 'email', 'password',
# 'cellphone', 'phone_number']
#
# def clean(self):
# cleaned_data = super(CustomerUserRegistrationForm, self).clean()
#
# # Password validation
# password = cleaned_data.get('password')
# password_validation = cleaned_data.get('password_validation')
#
# password_error_message = _("A senha deve ser igual à confirmação \
# de senha.")
# if password and password_validation:
# if password != password_validation:
# password_error = forms.ValidationError(password_error_message)
# self.add_error('password', password_error)
#
# # Username UNIQUE constraint validation
# username = cleaned_data.get('username')
#
# username_error_message = _("Este nome de usuário não está disponível.")
# if CustomerUser.objects.filter(username=username).exists():
# raise ValidationError(username_error_message)
#
# return cleaned_data
#
# class CustomerUserUpdateForm(forms.ModelForm):
# """
# class for CustomerUser update form
# """
#
# username = forms.CharField(label=_("Nome de usuário"))
# first_name = forms.CharField(label=_("Nome"))
# last_name = forms.CharField(label=_("Sobrenome"))
# email = forms.CharField(label=_("E-mail"), validators=[validate_email])
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
# password_validation = forms.\
# CharField(label=_("Confirmação de senha"), widget=forms.PasswordInput)
#
# class Meta:
# model = CustomerUser
# fields = ['username', 'first_name', 'last_name', 'email', 'password']
#
# class LoginForm(forms.Form):
# """
# Class for Costumer User Login form
# """
#
# username = forms.CharField(label=_("Nome de usuário"))
# password = forms.CharField(label=_("Senha"), widget=forms.PasswordInput)
#
# Path: users/models.py
# class CustomerUser(User):
# """docstring for CustomerUser"""
# cellphone = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Telefone Celular"),
# max_length=15, null=False, blank=False)
#
# phone_number = models.CharField(
# help_text=_("Número de telefone. Preencha apenas com númreos."),
# verbose_name=_("Teledone Fixo"),
# max_length=15, null=True, blank=False)
#
# class Meta:
# verbose_name = _('Cliente')
# verbose_name_plural = _('Clientes')
#
# def __str__(self):
# return (self.first_name + " " + self.last_name)
. Output only the next line. | class CustomerUserRegistrationView(FormView): |
Here is a snippet: <|code_start|># -*- coding: utf-8 -*-
app = Flask(__name__)
print_queue = Queue()
prints_folder = pkConfig["paths"]["event_prints"]
monitor_folder = pkConfig["paths"]["event_composites"]
glob_string = "*" + pkConfig["compositor"]["filetype"]
regex = re.compile(r"^(.*?)-(\d+)$")
config = dict()
class PrintQueueManager(threading.Thread):
def run(self, *args, **kwargs):
print_interval = pkConfig["server"]["print-queue"]["interval"]
self.running = True
while self.running:
filename = print_queue.get()
src = os.path.join(prints_folder, filename)
smart_copy(src, "/home/retrobooth/smb-printsrv/")
if print_interval:
time.sleep(print_interval)
def stop(self):
self.running = False
<|code_end|>
. Write the next line using the current file imports:
import atexit
import os
import re
import shutil
import threading
import time
from multiprocessing import Queue
from os.path import join
from flask import Flask, jsonify, request, send_from_directory
from tailor.config import pkConfig
and context from other files:
# Path: tailor/config.py
# def jpath(*args):
# def reload(path):
, which may include functions, classes, or code. Output only the next line. | def get_filenames_to_serve(): |
Given the following code snippet before the placeholder: <|code_start|># -*- coding: utf-8 -*-
sys.path.extend(['..', '.'])
async def test_render():
renderer = TemplateRenderer()
builder = YamlTemplateBuilder()
root = builder.read('tailor/resources/templates/standard.yaml')
im = Image.new('RGB', (5184, 3456), (128, 0, 0))
root.push_image(im)
<|code_end|>
, predict the next line using imports from the current file:
import sys
import asyncio
from unittest import TestCase
from tailor.builder import YamlTemplateBuilder
from tailor.plugins.composer.renderer import TemplateRenderer
from PIL import Image
and context including class names, function names, and sometimes code from other files:
# Path: tailor/builder.py
# class YamlTemplateBuilder(TemplateBuilder):
# def read(self, filename):
# import yaml
#
# with open(filename) as fp:
# config = yaml.load(fp)
#
# return self.build_graph(config)
#
# Path: tailor/plugins/composer/renderer.py
# class TemplateRenderer:
# """
# Render template graphs using PIL
# """
#
# image_mode = "RGBA"
#
# def __init__(self):
# self.handlers = {
# "area": self.render_area_node,
# "image": self.render_image_node,
# "placeholder": self.render_cropped_node,
# }
#
# def render_node(self, node):
# """ Render one node
#
# :param node: TemplateNode
# :return: (PIL Image or None, Rect or None)
# """
# try:
# func = self.handlers[node.kind]
# except KeyError:
# return None, None
# return func(node)
#
# def render_all(self, root):
# """ Render a new image and all nodes. Must pass in the root node.
#
# :param root: Root node
# """
#
# def func():
# base_image = self.create_blank_image(root)
#
# for node in root.bfs_children():
# self.render_and_paste(node, base_image)
#
# return base_image
#
# loop = asyncio.get_event_loop()
# return loop.run_in_executor(None, func)
#
# @asyncio.coroutine
# def render_all_and_save(self, root, filename):
# """ Render the template, then save it to a file
#
# :param root:
# :param filename:
#
# :type root:
# :type filename: str
#
# :returns: None
# """
# image = yield from self.render_all(root)
# loop = asyncio.get_event_loop()
# return loop.run_in_executor(None, image.save, filename)
#
# def render_and_paste(self, node, base_image):
# """ Render a node, if there is a result, then paste to the base_image
#
# :param node: TemplateNode
# :param base_image: PIL image
#
# :return: PIL Image of node, else None
# """
# image, rect = self.render_node(node)
# if image is not None:
# x, y, w, h = rect
# self.paste(image, base_image, (x, y))
#
# return image
#
# @staticmethod
# def paste(upper, lower, top_left):
# # correctly handle the alpha channel transparency.
# if upper.mode == "RGBA":
# lower.paste(upper, top_left, mask=upper)
# else:
# lower.paste(upper, top_left)
#
# def render_area_node(self, node):
# # draw = ImageDraw.Draw(self.image)
# # draw.rectangle(rect, (0, 255, 255))
# return None, None
#
# def render_image_node(self, node):
# # LIMITATIONS: only pastes to area, no scaling
# try:
# if node.filename is not None:
# node.data = Image.open(node.filename)
# except FileNotFoundError:
# node.data = None
#
# # TODO: scaling options, ect, processing chain
# if node.data is not None:
# root = node.get_root()
# area = self.convert_rect(node.parent.rect, root.dpi)
# return node.data, area
#
# return None, None
#
# def render_cropped_node(self, node):
# if node.data:
# root = node.get_root()
# # TODO: move these functions into a processing chain
# area = self.convert_rect(node.parent.rect, root.dpi)
# image = Autocrop().process(node.data, area)
# return image, area
# # TODO: lazy loading of images
# return None, None
#
# def create_blank_image(self, node):
# root = node.get_root()
# size = root.determine_rect()[2:]
# pixel_size = self.convert_from_image_to_pixel(size, root.dpi)
# return Image.new(self.image_mode, pixel_size)
#
# def convert_rect(self, rect, dpi):
# x1, y1, w, h = self.convert_from_image_to_pixel(rect, dpi)
# x1, y1, x2, y2 = self.convert_from_xywh((x1, y1, w, h))
# return x1, y1, w, h
#
# @staticmethod
# def convert_from_image_to_pixel(area, dpi):
# return [int(i * dpi) for i in area]
#
# @staticmethod
# def convert_from_xywh(rect):
# x, y, w, h = rect
# return x, y, x + w, y + h
. Output only the next line. | im = Image.new('RGB', (1024, 1024), (0, 128, 0)) |
Predict the next line for this snippet: <|code_start|>
async def test_render():
renderer = TemplateRenderer()
builder = YamlTemplateBuilder()
root = builder.read('tailor/resources/templates/standard.yaml')
im = Image.new('RGB', (5184, 3456), (128, 0, 0))
root.push_image(im)
im = Image.new('RGB', (1024, 1024), (0, 128, 0))
root.push_image(im)
im = Image.new('RGB', (1024, 1024), (0, 0, 128))
root.push_image(im)
im = Image.new('RGB', (1024, 1024), (255, 255, 0))
root.push_image(im)
await renderer.render_all_and_save(root, 'test_image.png')
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(test_render())
print('done')
<|code_end|>
with the help of current file imports:
import sys
import asyncio
from unittest import TestCase
from tailor.builder import YamlTemplateBuilder
from tailor.plugins.composer.renderer import TemplateRenderer
from PIL import Image
and context from other files:
# Path: tailor/builder.py
# class YamlTemplateBuilder(TemplateBuilder):
# def read(self, filename):
# import yaml
#
# with open(filename) as fp:
# config = yaml.load(fp)
#
# return self.build_graph(config)
#
# Path: tailor/plugins/composer/renderer.py
# class TemplateRenderer:
# """
# Render template graphs using PIL
# """
#
# image_mode = "RGBA"
#
# def __init__(self):
# self.handlers = {
# "area": self.render_area_node,
# "image": self.render_image_node,
# "placeholder": self.render_cropped_node,
# }
#
# def render_node(self, node):
# """ Render one node
#
# :param node: TemplateNode
# :return: (PIL Image or None, Rect or None)
# """
# try:
# func = self.handlers[node.kind]
# except KeyError:
# return None, None
# return func(node)
#
# def render_all(self, root):
# """ Render a new image and all nodes. Must pass in the root node.
#
# :param root: Root node
# """
#
# def func():
# base_image = self.create_blank_image(root)
#
# for node in root.bfs_children():
# self.render_and_paste(node, base_image)
#
# return base_image
#
# loop = asyncio.get_event_loop()
# return loop.run_in_executor(None, func)
#
# @asyncio.coroutine
# def render_all_and_save(self, root, filename):
# """ Render the template, then save it to a file
#
# :param root:
# :param filename:
#
# :type root:
# :type filename: str
#
# :returns: None
# """
# image = yield from self.render_all(root)
# loop = asyncio.get_event_loop()
# return loop.run_in_executor(None, image.save, filename)
#
# def render_and_paste(self, node, base_image):
# """ Render a node, if there is a result, then paste to the base_image
#
# :param node: TemplateNode
# :param base_image: PIL image
#
# :return: PIL Image of node, else None
# """
# image, rect = self.render_node(node)
# if image is not None:
# x, y, w, h = rect
# self.paste(image, base_image, (x, y))
#
# return image
#
# @staticmethod
# def paste(upper, lower, top_left):
# # correctly handle the alpha channel transparency.
# if upper.mode == "RGBA":
# lower.paste(upper, top_left, mask=upper)
# else:
# lower.paste(upper, top_left)
#
# def render_area_node(self, node):
# # draw = ImageDraw.Draw(self.image)
# # draw.rectangle(rect, (0, 255, 255))
# return None, None
#
# def render_image_node(self, node):
# # LIMITATIONS: only pastes to area, no scaling
# try:
# if node.filename is not None:
# node.data = Image.open(node.filename)
# except FileNotFoundError:
# node.data = None
#
# # TODO: scaling options, ect, processing chain
# if node.data is not None:
# root = node.get_root()
# area = self.convert_rect(node.parent.rect, root.dpi)
# return node.data, area
#
# return None, None
#
# def render_cropped_node(self, node):
# if node.data:
# root = node.get_root()
# # TODO: move these functions into a processing chain
# area = self.convert_rect(node.parent.rect, root.dpi)
# image = Autocrop().process(node.data, area)
# return image, area
# # TODO: lazy loading of images
# return None, None
#
# def create_blank_image(self, node):
# root = node.get_root()
# size = root.determine_rect()[2:]
# pixel_size = self.convert_from_image_to_pixel(size, root.dpi)
# return Image.new(self.image_mode, pixel_size)
#
# def convert_rect(self, rect, dpi):
# x1, y1, w, h = self.convert_from_image_to_pixel(rect, dpi)
# x1, y1, x2, y2 = self.convert_from_xywh((x1, y1, w, h))
# return x1, y1, w, h
#
# @staticmethod
# def convert_from_image_to_pixel(area, dpi):
# return [int(i * dpi) for i in area]
#
# @staticmethod
# def convert_from_xywh(rect):
# x, y, w, h = rect
# return x, y, x + w, y + h
, which may contain function names, class names, or code. Output only the next line. | class TestRenderer(TestCase): |
Predict the next line for this snippet: <|code_start|> super(SharingControls, self).__init__(*args, **kwargs)
def disable(self):
def derp(*arg):
return False
for widget in self.children:
widget.on_touch_down = derp
widget.on_touch_up = derp
widget.on_touch_motion = derp
def do_print(self):
filename = self.filename[self.filename.rindex("/") + 1 :]
url = "http://127.0.0.1:5000/print/" + filename
for i in range(self.prints):
req = UrlRequest(url, print)
def handle_print_touch(self, popup, widget):
popup.dismiss()
self.do_print()
layout = BoxLayout(orientation="vertical")
label = Label(text="Your prints will be ready soon!", font_size=30)
button = Button(text="Awesome!", font_size=30, background_color=(0, 1, 0, 1))
layout.add_widget(label)
layout.add_widget(button)
popup = Popup(
title="Just thought you should know...",
<|code_end|>
with the help of current file imports:
import os.path
from functools import partial
from kivy.config import Config
from kivy.network.urlrequest import UrlRequest
from kivy.properties import *
from kivy.uix.accordion import AccordionItem
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from ..config import pkConfig
from ..smtp import SenderThread
from PIL import Image
from kivy.core.window import Window
and context from other files:
# Path: tailor/config.py
# def jpath(*args):
# def reload(path):
#
# Path: tailor/smtp.py
# class SenderThread:
# def __init__(self, address, filename):
# self.address = address
# self.filename = filename
#
# def run(self):
# sender = pkConfig["email"]["sender"]
# subject = pkConfig["email"]["subject"]
# auth_file = "/home/mjolnir/git/tailor/secrets"
#
# msg = email.MIMEMultipart.MIMEMultipart("mixed")
# msg["subject"] = subject
# msg["from"] = sender
# msg["to"] = self.address
#
# body = email.mime.Text.MIMEText("Here's your photo!\n\nThank you!\n\n")
# msg.attach(body)
#
# file_msg = email.mime.base.MIMEBase("image", "jpeg")
# file_msg.set_payload(open(self.filename).read())
# email.encoders.encode_base64(file_msg)
# file_msg.add_header("Content-Disposition", "attachment;filname=photo.jpg")
# msg.attach(file_msg)
#
# with open(auth_file) as fh:
# auth = pickle.load(fh)
# auth = auth["smtp"]
#
# with open("email.log", "a") as fh:
# fh.write("{}\t{}\n".format(self.address, self.filename))
#
# smtpout = smtplib.SMTP(auth["host"])
# smtpout.login(auth["username"], auth["password"])
# smtpout.sendmail(sender, [self.address], msg.as_string())
# smtpout.quit()
, which may contain function names, class names, or code. Output only the next line. | content=layout, |
Next line prediction: <|code_start|> label = Label(
text="You want to print {} copies?".format(self.prints), font_size=30
)
button0 = Button(
text="Just do it!", font_size=30, background_color=(0, 1, 0, 1)
)
button1 = Button(text="No", font_size=30, background_color=(1, 0, 0, 1))
layout1.add_widget(button1)
layout1.add_widget(button0)
layout0.add_widget(label)
layout0.add_widget(layout1)
popup = Popup(
title="Are you sure?",
content=layout0,
size_hint=(0.5, 0.5),
auto_dismiss=False,
)
button0.bind(on_release=partial(self.handle_print_touch, popup))
button1.bind(on_release=popup.dismiss)
popup.open()
def confirm_address(self):
if not self.email_addressee:
layout = BoxLayout(orientation="vertical")
label = Label(text="Please enter an email address", font_size=30)
button = Button(text="ok!", font_size=30, background_color=(0, 1, 0, 1))
layout.add_widget(label)
<|code_end|>
. Use current file imports:
(import os.path
from functools import partial
from kivy.config import Config
from kivy.network.urlrequest import UrlRequest
from kivy.properties import *
from kivy.uix.accordion import AccordionItem
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from ..config import pkConfig
from ..smtp import SenderThread
from PIL import Image
from kivy.core.window import Window)
and context including class names, function names, or small code snippets from other files:
# Path: tailor/config.py
# def jpath(*args):
# def reload(path):
#
# Path: tailor/smtp.py
# class SenderThread:
# def __init__(self, address, filename):
# self.address = address
# self.filename = filename
#
# def run(self):
# sender = pkConfig["email"]["sender"]
# subject = pkConfig["email"]["subject"]
# auth_file = "/home/mjolnir/git/tailor/secrets"
#
# msg = email.MIMEMultipart.MIMEMultipart("mixed")
# msg["subject"] = subject
# msg["from"] = sender
# msg["to"] = self.address
#
# body = email.mime.Text.MIMEText("Here's your photo!\n\nThank you!\n\n")
# msg.attach(body)
#
# file_msg = email.mime.base.MIMEBase("image", "jpeg")
# file_msg.set_payload(open(self.filename).read())
# email.encoders.encode_base64(file_msg)
# file_msg.add_header("Content-Disposition", "attachment;filname=photo.jpg")
# msg.attach(file_msg)
#
# with open(auth_file) as fh:
# auth = pickle.load(fh)
# auth = auth["smtp"]
#
# with open("email.log", "a") as fh:
# fh.write("{}\t{}\n".format(self.address, self.filename))
#
# smtpout = smtplib.SMTP(auth["host"])
# smtpout.login(auth["username"], auth["password"])
# smtpout.sendmail(sender, [self.address], msg.as_string())
# smtpout.quit()
. Output only the next line. | layout.add_widget(button) |
Next line prediction: <|code_start|># -*- coding: utf-8 -*-
class TimingTests(TestCase):
def test_normal(self):
exp = [(False, 2), (False, 2), (False, 2), (True, 2)]
res = list(timing_generator(2, 4))
self.assertEqual(exp, res)
def test_with_initial(self):
exp = [(False, 10), (False, 2), (False, 2), (True, 2)]
res = list(timing_generator(2, 4, 10))
<|code_end|>
. Use current file imports:
(from unittest import TestCase
from apps.service.async_helpers import timing_generator)
and context including class names, function names, or small code snippets from other files:
# Path: apps/service/async_helpers.py
# def timing_generator(interval, amount, initial=None):
# """ Generator to make generic repeating timer
#
# Returns (bool, value) tuples, where:
# bool: if true, then this is last iteration of sequence
# value: ether interval or initial value
#
# :param interval: number to be returned each iteration
# :param amount: number of iterations
# :param initial: if specified, this value will be used on 1st iteration
# :return: (bool, int)
# """
# assert amount > 0
#
# if initial is not None:
# amount -= 1
# yield amount == 0, initial
#
# for index, value in enumerate(repeat(interval, amount), start=1):
# yield index == amount, value
. Output only the next line. | self.assertEqual(exp, res) |
Next line prediction: <|code_start|># -*- coding: utf-8 -*-
class SenderThread:
def __init__(self, address, filename):
self.address = address
self.filename = filename
def run(self):
sender = pkConfig["email"]["sender"]
subject = pkConfig["email"]["subject"]
auth_file = "/home/mjolnir/git/tailor/secrets"
msg = email.MIMEMultipart.MIMEMultipart("mixed")
msg["subject"] = subject
msg["from"] = sender
msg["to"] = self.address
body = email.mime.Text.MIMEText("Here's your photo!\n\nThank you!\n\n")
msg.attach(body)
file_msg = email.mime.base.MIMEBase("image", "jpeg")
file_msg.set_payload(open(self.filename).read())
email.encoders.encode_base64(file_msg)
file_msg.add_header("Content-Disposition", "attachment;filname=photo.jpg")
msg.attach(file_msg)
with open(auth_file) as fh:
<|code_end|>
. Use current file imports:
(import email
import pickle
import smtplib
from .config import pkConfig)
and context including class names, function names, or small code snippets from other files:
# Path: tailor/config.py
# def jpath(*args):
# def reload(path):
. Output only the next line. | auth = pickle.load(fh) |
Continue the code snippet: <|code_start|> feed = filelike
else:
feed = filelike.read()
self.raw = feed
self.parsed = feedparser.parse(self.raw)
self.feed = FeedGenerator()
# Set feed-level values.
self.build_feed()
self.build_entries()
def build_feed(self):
f = self.parsed.feed
for field in [
'id', 'title', 'subtitle', 'updated', 'rights', 'generator',
'docs', 'language', ('xml_lang', 'language'),
('authors', 'author'), ('links', 'link')
]:
self._copy(f, self.feed, field)
if f.get('image'):
image_kwargs = {}
for image_field in 'url', 'title', 'link', 'width', 'height', 'description':
ignore, value = self._setter(f.image, self.feed, image_field)
if value is not self.NO_VALUE:
image_kwargs[image_field] = value
if image_kwargs:
<|code_end|>
. Use current file imports:
from pdb import set_trace
from feedgen.feed import FeedGenerator
from .util import isstr
import sys
import feedparser
and context (classes, functions, or code) from other files:
# Path: botfriend/util.py
# def isstr(x):
# """Compatibility method equivalent to isinstance(x, basestring)"""
# if major == 2:
# return isinstance(x, basestring)
# return isinstance(x, bytes) or isinstance(x, str)
. Output only the next line. | self.feed.image(**image_kwargs) |
Continue the code snippet: <|code_start|> )
def self_test(self):
# Do something that will raise an exception if the credentials are invalid.
# Return a string that will let the user know if they somehow gave
# credentials to the wrong account.
verification = self.api.account_verify_credentials()
if 'username' in verification:
return ['username']
else:
# error
raise Exception(repr(verification))
def publish(self, post, publication):
media_ids = []
arguments = dict()
for attachment in post.attachments:
if attachment.filename:
path = self.attachment_path(attachment.filename)
arguments = dict(media_file=path)
else:
arguments = dict(media_file=attachment.content,
mime_type=attachment.media_type)
if attachment.alt:
arguments['description'] = attachment.alt
try:
media = None
if arguments:
media = self.api.media_post(**arguments)
if media:
<|code_end|>
. Use current file imports:
from nose.tools import set_trace
from mastodon import Mastodon
from botfriend.bot import Publisher
and context (classes, functions, or code) from other files:
# Path: botfriend/bot.py
# class Publisher(object):
#
# """A way of publishing the output of a bot."""
#
# @classmethod
# def from_config(cls, bot, module, full_config):
# publish_config = full_config.get('publish', {})
# module_config = publish_config.get(module)
#
# # Try both publish.foo and publish._foo, in case the module
# # needs to import a package called 'foo' from elsewhere (see
# # _mastodon.py for an example.)
# publisher_module = None
# names = ('botfriend.publish.' + module, 'botfriend.publish._' + module)
# errors = []
# for module_name in names:
# try:
# publisher_module = importlib.import_module(module_name)
# break
# except ImportError as e:
# errors.append(e)
# if not publisher_module:
# raise ImportError(
# "Could not import publisher for %s; tried %s. Errors were: %r" % (
# module, ", ".join(names), errors
# )
# )
# publisher_class = getattr(publisher_module, "Publisher", None)
# if not publisher_class:
# raise Exception(
# "Loaded module %s but could not find a class called Publisher inside." % bot_module
# )
# try:
# publisher = publisher_class(bot, full_config, module_config)
# except Exception as e:
# raise Exception(
# "Could not import %s publisher for %s: %s" % (
# module_name, bot.name, str(e)
# )
# )
# publisher.service = module
# return publisher
#
# def __init__(self, service_name, bot, full_config, **config):
# self.service_name=service_name
# self.bot = bot
#
# def attachment_path(self, path):
# """Convert a path relative to the botfriend root to an absolute
# path."""
# d = os.path.split(__file__)[0]
# return os.path.join(d, path)
#
# def publish(self, post, publication):
# """Publish the content of the given Post object.
#
# This probably includes text but may also include binary
# objects.
#
# :param post: A Post object.
# :param previous_attempt: A Publication object describing the
# attempt to publish this post. It may have data left in it
# from a previous publication attempt.
# """
# raise NotImplementedError()
. Output only the next line. | media_ids.append(media['id']) |
Based on the snippet: <|code_start|>
#dj.config['external-odor'] = {'protocol': 'file',
# 'location': '/mnt/dj-stor01/pipeline-externals'}
schema = dj.schema('pipeline_odor', locals(), create_tables=False)
@schema
class Odorant(dj.Lookup):
definition = """ # Odorants used in solutions.
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import h5py
import itertools
import numpy as np
import datajoint as dj
import bisect
import matplotlib.pyplot as plt
from commons import lab
from scipy import ndimage
from pipeline.utils import h5
from datajoint.hash import key_hash
from .exceptions import PipelineException
from pipeline import meso, stack, mice, experiment, shared
from scipy.spatial import distance
from mpl_toolkits.mplot3d import Axes3D
and context (classes, functions, sometimes code) from other files:
# Path: python/pipeline/exceptions.py
# class PipelineException(Exception):
# """Base pipeline exception. Prints the message plus any specific info."""
# def __init__(self, message, info=None):
# info_message = '\nError info: ' + repr(info) if info else ''
# super().__init__(message + info_message)
# self.info = info
. Output only the next line. | odorant : varchar(32) # name of odorant |
Based on the snippet: <|code_start|> self.img = img
self.draw_img = np.asarray(img / img.max(), dtype=float)
self.mask = 1 + 0 * img
self.exit = False
self.r = 40
self.X, self.Y = np.mgrid[:img.shape[0], :img.shape[1]]
def grab(self):
print('Contrast (std)', np.std(self.img))
img = np.asarray(self.img / self.img.max(), dtype=float)
cv2.namedWindow('real image')
cv2.setMouseCallback('real image', self, 0)
while not self.exit:
cv2.imshow('real image', img)
if (cv2.waitKey(0) & 0xFF) == ord('q'):
cv2.waitKey(1)
cv2.destroyAllWindows()
break
cv2.waitKey(2)
def __call__(self, event, x, y, flags, params):
# img = np.asarray(self.img , dtype=np.uint8)[...,None] * np.ones((1,1,3), dtype=np.uint8)
img = np.asarray(self.img / self.img.max(), dtype=float)
cv2.imshow('real image', self.draw_img)
if event == cv2.EVENT_LBUTTONDOWN:
print('Start Mouse Position: ' + str(x) + ', ' + str(y))
self.start = np.asarray([x, y])
<|code_end|>
, predict the immediate next line with the help of imports:
from collections import defaultdict
from itertools import count
from operator import attrgetter
from os import path as op
from tqdm import tqdm
from ..exceptions import PipelineException
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import pickle
import cv2
import math
and context (classes, functions, sometimes code) from other files:
# Path: python/pipeline/exceptions.py
# class PipelineException(Exception):
# """Base pipeline exception. Prints the message plus any specific info."""
# def __init__(self, message, info=None):
# info_message = '\nError info: ' + repr(info) if info else ''
# super().__init__(message + info_message)
# self.info = info
. Output only the next line. | elif event == cv2.EVENT_LBUTTONUP: |
Predict the next line after this snippet: <|code_start|> [2, 'rigid2', '3-d cross-correlation (100 microns above and below estimated z)', 'python'],
[3, 'affine', ('exhaustive search of 3-d rotations + cross-correlation (40 microns'
'above and below estimated z)'), 'python'],
[4, 'affine2', ('exhaustive search of 3-d rotations + cross-correlation (100 microns'
'above and below estimated z)'), 'python'],
[5, 'non-rigid', 'affine plus deformation field learnt via gradient ascent on correlation', 'python']
]
@schema
class CurationMethod(dj.Lookup):
definition = """
curation_method : tinyint # method to curate the initial registration estimates
---
name : varchar(16) # short name to identify the curation method
details : varchar(255) # more details
language : enum('matlab', 'python') # implementation language
"""
contents = [
[1, 'none', 'estimates are left unchanged', 'python'],
[2, 'manual', 'manually inspect each field estimate', 'matlab'],
]
@schema
class AreaMaskMethod(dj.Lookup):
definition = """
# method for assigning cortex to visual areas
mask_method : tinyint # method to assign membership to visual areas
---
name : varchar(16)
<|code_end|>
using the current file's imports:
import datajoint as dj
import numpy as np
from scipy.signal import hamming, convolve, medfilt
from .exceptions import PipelineException
and any relevant context from other files:
# Path: python/pipeline/exceptions.py
# class PipelineException(Exception):
# """Base pipeline exception. Prints the message plus any specific info."""
# def __init__(self, message, info=None):
# info_message = '\nError info: ' + repr(info) if info else ''
# super().__init__(message + info_message)
# self.info = info
. Output only the next line. | details : varchar(255) |
Using the snippet: <|code_start|>__author__ = 'SolarLune'
# Random level generation module.
# TODO: Add rectangular rooms to the GenNodes room styles.
# CONSTANTS
# Connection styles for the GenNodes function;
GN_CONNECTION_STYLE_ONE = 0 # ONE = all nodes connect to another one (other than themselves) randomly
GN_CONNECTION_STYLE_ALL = 1 # ALL = Each node makes a connection to every other node
GN_CONNECTION_STYLE_HUB = 2 # HUB = Each node connects to a pre-determined node (like a spiderweb or a "splash")
GN_ROOM_STYLE_ROUND = 0 # Explodes a round room for the node
GN_ROOM_STYLE_SQUARE = 1 # Explodes a rectangular room for the node
RLG_POP_CEIL = 0 # When
<|code_end|>
, determine the next line of code. You have imports:
import random
import math
import copy
import mathutils
from bge import logic
from .mesh import get_dimensions
from .math import clamp
and context (class names, function names, or code) available:
# Path: BGE/bghelper/mesh.py
# def get_dimensions(object=None, roundit=3, offset=1, meshnum=0, factor_in_scale=1):
# """
# Gets the dimensions of the object (what you see under dimensions in the properties window in the 3D menu).
# mesh = which mesh to use to get the object's dimensions.
# roundit = how far down to round the returned dimension values; set it to a negative number to not round the numbers off at all.
# offset = Whether or not to return the offset point of the dimensions (the center point);
# This negated (-offset, literally) is the origin point, generally.
# meshnum = The index of the mesh to use. Usually 0 is okay.
# factor_in_scale = If it should multiply the dimensions by the object's world scale.
# """
#
# if object == None:
# object = logic.getCurrentController().owner
#
# s = object.worldScale
#
# mesh = object.meshes[meshnum]
#
# # print (dir(mesh))
#
# verts = [[], [], []]
#
# originpos = [0, 0, 0]
#
# for mat in range(len(mesh.materials)):
#
# for v in range(mesh.getVertexArrayLength(mat)):
# vert = mesh.getVertex(mat, v)
#
# pos = vert.getXYZ()
#
# verts[0].append(pos[0])
# verts[1].append(pos[1])
# verts[2].append(pos[2])
#
# verts[0].sort()
# verts[1].sort()
# verts[2].sort()
#
# if offset != 0:
# offsetpos = [
# (verts[0][len(verts[0]) - 1] + verts[0][0]) / 2,
# (verts[1][len(verts[1]) - 1] + verts[1][0]) / 2,
# (verts[2][len(verts[2]) - 1] + verts[2][0]) / 2,
# ]
#
# size = [(verts[0][len(verts[0]) - 1] - verts[0][0]),
# (verts[1][len(verts[0]) - 1] - verts[1][0]),
# (verts[2][len(verts[0]) - 1] - verts[2][0])]
#
# if factor_in_scale:
# size = [size[0] * s[0],
# size[1] * s[1],
# size[2] * s[2]]
#
# if roundit >= 0:
# size = [
# round(size[0], roundit),
# round(size[1], roundit),
# round(size[2], roundit),
# ]
#
# if offset:
# return (mathutils.Vector(size), mathutils.Vector(offsetpos))
#
# else:
# return (mathutils.Vector(size), None)
#
# Path: BGE/bghelper/math.py
# def clamp(value, minimum, maximum):
# """
# Clamp: Clamps the specified 'value' between the maximum and minimum values.
# Returns 'max' when 'value' is greater than 'max', 'min' when 'value' is less than 'min',
# and 'value' itself when neither is true.
# """
# return (min(max(value, minimum), maximum))
. Output only the next line. | RLG_POP_END = 1 |
Based on the snippet: <|code_start|>__author__ = 'SolarLune'
# Random level generation module.
# TODO: Add rectangular rooms to the GenNodes room styles.
# CONSTANTS
# Connection styles for the GenNodes function;
GN_CONNECTION_STYLE_ONE = 0 # ONE = all nodes connect to another one (other than themselves) randomly
GN_CONNECTION_STYLE_ALL = 1 # ALL = Each node makes a connection to every other node
GN_CONNECTION_STYLE_HUB = 2 # HUB = Each node connects to a pre-determined node (like a spiderweb or a "splash")
GN_ROOM_STYLE_ROUND = 0 # Explodes a round room for the node
GN_ROOM_STYLE_SQUARE = 1 # Explodes a rectangular room for the node
RLG_POP_CEIL = 0 # When
RLG_POP_END = 1
<|code_end|>
, predict the immediate next line with the help of imports:
import random
import math
import copy
import mathutils
from bge import logic
from .mesh import get_dimensions
from .math import clamp
and context (classes, functions, sometimes code) from other files:
# Path: BGE/bghelper/mesh.py
# def get_dimensions(object=None, roundit=3, offset=1, meshnum=0, factor_in_scale=1):
# """
# Gets the dimensions of the object (what you see under dimensions in the properties window in the 3D menu).
# mesh = which mesh to use to get the object's dimensions.
# roundit = how far down to round the returned dimension values; set it to a negative number to not round the numbers off at all.
# offset = Whether or not to return the offset point of the dimensions (the center point);
# This negated (-offset, literally) is the origin point, generally.
# meshnum = The index of the mesh to use. Usually 0 is okay.
# factor_in_scale = If it should multiply the dimensions by the object's world scale.
# """
#
# if object == None:
# object = logic.getCurrentController().owner
#
# s = object.worldScale
#
# mesh = object.meshes[meshnum]
#
# # print (dir(mesh))
#
# verts = [[], [], []]
#
# originpos = [0, 0, 0]
#
# for mat in range(len(mesh.materials)):
#
# for v in range(mesh.getVertexArrayLength(mat)):
# vert = mesh.getVertex(mat, v)
#
# pos = vert.getXYZ()
#
# verts[0].append(pos[0])
# verts[1].append(pos[1])
# verts[2].append(pos[2])
#
# verts[0].sort()
# verts[1].sort()
# verts[2].sort()
#
# if offset != 0:
# offsetpos = [
# (verts[0][len(verts[0]) - 1] + verts[0][0]) / 2,
# (verts[1][len(verts[1]) - 1] + verts[1][0]) / 2,
# (verts[2][len(verts[2]) - 1] + verts[2][0]) / 2,
# ]
#
# size = [(verts[0][len(verts[0]) - 1] - verts[0][0]),
# (verts[1][len(verts[0]) - 1] - verts[1][0]),
# (verts[2][len(verts[0]) - 1] - verts[2][0])]
#
# if factor_in_scale:
# size = [size[0] * s[0],
# size[1] * s[1],
# size[2] * s[2]]
#
# if roundit >= 0:
# size = [
# round(size[0], roundit),
# round(size[1], roundit),
# round(size[2], roundit),
# ]
#
# if offset:
# return (mathutils.Vector(size), mathutils.Vector(offsetpos))
#
# else:
# return (mathutils.Vector(size), None)
#
# Path: BGE/bghelper/math.py
# def clamp(value, minimum, maximum):
# """
# Clamp: Clamps the specified 'value' between the maximum and minimum values.
# Returns 'max' when 'value' is greater than 'max', 'min' when 'value' is less than 'min',
# and 'value' itself when neither is true.
# """
# return (min(max(value, minimum), maximum))
. Output only the next line. | RLG_POP_STRAIGHT = 2 |
Based on the snippet: <|code_start|># Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CheckVersionTest(tf.test.TestCase):
def test_base(self):
tf_df_version = "1.2.3" # Does not matter.
self.assertTrue(
check_version.check_version(
tf_df_version, ["2.6.0", "2.6.1"], "2.6.0", external_logic=True))
self.assertFalse(
check_version.check_version(
tf_df_version, ["2.6.0", "2.6.1"],
<|code_end|>
, predict the immediate next line with the help of imports:
import tensorflow as tf
from tensorflow_decision_forests.tensorflow import check_version
and context (classes, functions, sometimes code) from other files:
# Path: tensorflow_decision_forests/tensorflow/check_version.py
# def check_version(tf_df_version,
# compatible_tf_versions,
# tf_version=None,
# external_logic=False):
# """Checks the compatibility of the TF version.
#
# Prints a warning message and return False in care of likely incompatible
# versions.
# """
#
# if not external_logic:
# pass
#
# if tf_version is None:
# tf_version = tf.__version__
# if tf_version not in compatible_tf_versions:
# logging.warning(
# "TensorFlow Decision Forests %s is compatible with the following "
# "TensorFlow Versions: %s. However, TensorFlow %s was detected. "
# "This can cause issues with the TF API and symbols in the custom C++ "
# "ops. See the TF and TF-DF compatibility table at "
# "https://github.com/tensorflow/decision-forests/blob/main/documentation/known_issues.md#compatibility-table.",
# tf_df_version, compatible_tf_versions, tf_version)
# return False
# return True
. Output only the next line. | "2.8.0-dev20211105", |
Given the code snippet: <|code_start|>from __future__ import division
from __future__ import print_function
def toy_dataspec():
dataspec = data_spec_pb2.DataSpecification()
f1 = dataspec.columns.add()
f1.name = "f1"
f1.type = data_spec_pb2.ColumnType.NUMERICAL
f2 = dataspec.columns.add()
f2.name = "f2"
f2.type = data_spec_pb2.ColumnType.CATEGORICAL
f2.categorical.number_of_unique_values = 3
f2.categorical.items["<OOD>"].index = 0
f2.categorical.items["x"].index = 1
f2.categorical.items["y"].index = 2
f3 = dataspec.columns.add()
f3.name = "f3"
f3.type = data_spec_pb2.ColumnType.CATEGORICAL
f3.categorical.number_of_unique_values = 3
f3.categorical.is_already_integerized = True
f4 = dataspec.columns.add()
f4.name = "f4"
f4.type = data_spec_pb2.ColumnType.DISCRETIZED_NUMERICAL
<|code_end|>
, generate the next line using the imports in this file:
import math
import tensorflow as tf
from absl.testing import parameterized
from tensorflow_decision_forests.component.py_tree import dataspec as dataspec_lib
from yggdrasil_decision_forests.dataset import data_spec_pb2
and context (functions, classes, or occasionally code) from other files:
# Path: tensorflow_decision_forests/component/py_tree/dataspec.py
# OUT_OF_DICTIONARY = "<OOD>"
# class SimpleColumnSpec(NamedTuple):
# def __repr__(self):
# def make_simple_column_spec(dataspec: data_spec_pb2.DataSpecification,
# col_idx: int) -> SimpleColumnSpec:
# def categorical_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def categorical_column_dictionary_to_list(
# column_spec: data_spec_pb2.Column) -> List[str]:
# def label_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def discretized_numerical_to_numerical(column_spec: data_spec_pb2.Column,
# value: int) -> float:
# def column_name_to_column_idx(name: str,
# dataspec: data_spec_pb2.DataSpecification) -> int:
. Output only the next line. | f4.discretized_numerical.boundaries[:] = [0, 1, 2] |
Based on the snippet: <|code_start|># Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
ops = tf.load_op_library(resource_loader.get_path_to_datafile("training.so"))
except Exception as e:
check_version.info_fail_to_load_custom_op(e, "training.so")
raise e
# Importing all the symbols.
module = sys.modules[__name__]
<|code_end|>
, predict the immediate next line with the help of imports:
from tensorflow_decision_forests.tensorflow import check_version
from tensorflow.python.platform import resource_loader
import tensorflow as tf
import sys
and context (classes, functions, sometimes code) from other files:
# Path: tensorflow_decision_forests/tensorflow/check_version.py
# def check_version(tf_df_version,
# compatible_tf_versions,
# tf_version=None,
# external_logic=False):
# """Checks the compatibility of the TF version.
#
# Prints a warning message and return False in care of likely incompatible
# versions.
# """
#
# if not external_logic:
# pass
#
# if tf_version is None:
# tf_version = tf.__version__
# if tf_version not in compatible_tf_versions:
# logging.warning(
# "TensorFlow Decision Forests %s is compatible with the following "
# "TensorFlow Versions: %s. However, TensorFlow %s was detected. "
# "This can cause issues with the TF API and symbols in the custom C++ "
# "ops. See the TF and TF-DF compatibility table at "
# "https://github.com/tensorflow/decision-forests/blob/main/documentation/known_issues.md#compatibility-table.",
# tf_df_version, compatible_tf_versions, tf_version)
# return False
# return True
. Output only the next line. | for name, value in ops.__dict__.items(): |
Given snippet: <|code_start|>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class ValueTest(parameterized.TestCase, tf.test.TestCase):
def test_regression(self):
value = value_lib.RegressionValue(
value=5.0, num_examples=10, standard_deviation=1.0)
logging.info("value:\n%s", value)
def test_probability(self):
value = value_lib.ProbabilityValue(
probability=[0.5, 0.4, 0.1], num_examples=10)
logging.info("value:\n%s", value)
def test_core_value_to_value_classifier(self):
core_node = decision_tree_pb2.Node()
core_node.classifier.distribution.counts[:] = [0.0, 8.0, 2.0]
core_node.classifier.distribution.sum = 10.0
self.assertEqual(
value_lib.core_value_to_value(core_node),
value_lib.ProbabilityValue(probability=[0.8, 0.2], num_examples=10))
def test_core_value_to_value_regressor(self):
core_node = decision_tree_pb2.Node()
core_node.regressor.top_value = 1.0
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from absl import logging
from absl.testing import parameterized
from tensorflow_decision_forests.component.py_tree import value as value_lib
from yggdrasil_decision_forests.model.decision_tree import decision_tree_pb2
import tensorflow as tf
and context:
# Path: tensorflow_decision_forests/component/py_tree/value.py
# @property
# def value(self):
# return self._value
which might include code, classes, or functions. Output only the next line. | core_node.regressor.distribution.sum = 10.0 |
Predict the next line for this snippet: <|code_start|>
# pylint: disable=g-long-lambda
class ObjectiveTest(parameterized.TestCase, tf.test.TestCase):
def test_classification(self):
objective = objective_lib.ClassificationObjective(
label="label", num_classes=5)
logging.info("objective: %s", objective)
objective = objective_lib.ClassificationObjective(
label="label", classes=["a", "b"])
logging.info("objective: %s", objective)
objective = objective_lib.ClassificationObjective(
label="label", classes=["a", "b"])
logging.info("objective: %s", objective)
objective = objective_lib.ClassificationObjective(
label="label", classes=["a", "b"], num_classes=2)
logging.info("objective: %s", objective)
def test_classification_errors(self):
self.assertRaises(
ValueError,
lambda: objective_lib.ClassificationObjective(label="label"))
self.assertRaises(
ValueError,
lambda: objective_lib.ClassificationObjective(label="", num_classes=5))
<|code_end|>
with the help of current file imports:
from absl import logging
from absl.testing import parameterized
from tensorflow_decision_forests.component.py_tree import objective as objective_lib
import tensorflow as tf
and context from other files:
# Path: tensorflow_decision_forests/component/py_tree/objective.py
# class AbstractObjective(object):
# class ClassificationObjective(AbstractObjective):
# class RegressionObjective(AbstractObjective):
# class RankingObjective(AbstractObjective):
# def __init__(self, label: str):
# def label(self):
# def task(self) -> Task:
# def __init__(self,
# label: str,
# classes: Optional[List[str]] = None,
# num_classes: Optional[int] = None):
# def num_classes(self) -> int:
# def classes(self) -> Optional[List[str]]:
# def has_integer_labels(self) -> bool:
# def task(self) -> "Task":
# def __repr__(self):
# def __eq__(self, other):
# def task(self) -> Task:
# def __repr__(self):
# def __eq__(self, other):
# def __init__(self, label: str, group: str):
# def group(self) -> str:
# def task(self) -> Task:
# def __repr__(self):
# def __eq__(self, other):
, which may contain function names, class names, or code. Output only the next line. | self.assertRaises( |
Here is a snippet: <|code_start|> "external/ydf/yggdrasil_decision_forests/test_data")
class TfOpTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.named_parameters(
("base", False, False),
("boolean", True, False),
("catset", False, True),
)
def test_toy_rf_classification_winner_takes_all(self, add_boolean_features,
has_catset):
# Create toy model.
model_path = os.path.join(
tempfile.mkdtemp(dir=self.get_temp_dir()), "test_basic_rf_wta")
test_utils.build_toy_random_forest(
model_path,
winner_take_all_inference=True,
add_boolean_features=add_boolean_features,
has_catset=has_catset)
features = test_utils.build_toy_input_feature_values(
features=None, has_catset=has_catset)
# Prepare model.
model = inference.Model(model_path)
@tf.function
def init_model():
tf.print("Loading model")
<|code_end|>
. Write the next line using the current file imports:
import concurrent.futures
import os
import tempfile
import tensorflow as tf
from absl import logging
from absl.testing import parameterized
from tensorflow_decision_forests.tensorflow.ops.inference import api as inference
from tensorflow_decision_forests.tensorflow.ops.inference import test_utils
from absl import flags
and context from other files:
# Path: tensorflow_decision_forests/tensorflow/ops/inference/api.py
# MISSING_NON_INTEGERIZED_CATEGORICAL_STORED_AS_INT = 0x7FFFFFFF - 2
# class Model(object):
# class ModelV2(tracking.AutoTrackable):
# class _InferenceArgsBuilder(tracking.AutoTrackable):
# class _AbstractModelLoader(six.with_metaclass(abc.ABCMeta, object)):
# class _CompiledSimpleMLModelResource(tracking.TrackableResource):
# class _DiskModelLoader(_AbstractModelLoader, tracking.AutoTrackable):
# def __init__(self,
# model_path: Text,
# tensor_model_path: Optional[Tensor] = None,
# verbose: Optional[bool] = True):
# def init_op(self) -> InitOp:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def __init__(self,
# model_path: Text,
# verbose: Optional[bool] = True,
# output_types: Optional[List[str]] = []):
# def apply_get_leaves(self, features: Dict[Text, Tensor]) -> Any:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def _create_model_identifier() -> Text:
# def __init__(self, verbose: Optional[bool] = True):
# def build_from_model_path(self, model_path: Text):
# def build_from_dataspec_and_header(self,
# dataspec: data_spec_pb2.DataSpecification,
# header: abstract_model_pb2.AbstractModel):
# def init_op(self) -> Tensor:
# def build_inference_op_args(
# self,
# features: Dict[Text, Tensor],
# output_leaves: Optional[bool] = False) -> Dict[Text, Any]:
# def _register_input_feature(self, name: Text, value: Tensor,
# feature_maps: FeatureMaps) -> None:
# def _create_str_to_int_tables(self):
# def _dict_to_list_sorted_by_key(src: Dict[Any, Any]) -> List[Any]:
# def _all_feature_idxs(feature_maps: FeatureMaps):
# def _check_all_input_features_are_provided(self, feature_maps):
# def _get_dense_output_dim(self):
# def _prepare_and_check_numerical_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_boolean_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_categorical_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def _prepare_and_check_categorical_set_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def initialize(self, model: "_CompiledSimpleMLModelResource") -> tf.Operation:
# def __init__(self, model_loader: _AbstractModelLoader):
# def _create_resource(self):
# def _initialize(self):
# def __init__(self, model_path, output_types: List[str]):
# def initialize(self, model: _CompiledSimpleMLModelResource) -> tf.Operation:
# def get_model_path(self) -> Tensor:
#
# Path: tensorflow_decision_forests/tensorflow/ops/inference/test_utils.py
# def build_toy_data_spec(add_boolean_features=False, has_catset=False):
# def build_toy_random_forest(path,
# winner_take_all_inference,
# add_boolean_features=False,
# has_catset=False,
# num_trees=2):
# def build_toy_gbdt(path, num_classes):
# def build_toy_input_features(use_rank_two=False, has_catset=False):
# def build_toy_input_feature_values(features,
# use_rank_two=False,
# has_catset=False):
# def shape(x):
# def __getitem__(self, key):
# def expected_toy_predictions_rf_weighted(add_boolean_features=False):
# def expected_toy_predictions_rf_wta(add_boolean_features=False,
# has_catset=False):
# def expected_toy_predictions_gbdt_binary():
# def expected_toy_predictions_gbdt_multiclass():
# class Identity:
, which may include functions, classes, or code. Output only the next line. | model.init_op() |
Predict the next line after this snippet: <|code_start|> "external/ydf/yggdrasil_decision_forests/test_data")
class TfOpTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.named_parameters(
("base", False, False),
("boolean", True, False),
("catset", False, True),
)
def test_toy_rf_classification_winner_takes_all(self, add_boolean_features,
has_catset):
# Create toy model.
model_path = os.path.join(
tempfile.mkdtemp(dir=self.get_temp_dir()), "test_basic_rf_wta")
test_utils.build_toy_random_forest(
model_path,
winner_take_all_inference=True,
add_boolean_features=add_boolean_features,
has_catset=has_catset)
features = test_utils.build_toy_input_feature_values(
features=None, has_catset=has_catset)
# Prepare model.
model = inference.Model(model_path)
@tf.function
def init_model():
tf.print("Loading model")
<|code_end|>
using the current file's imports:
import concurrent.futures
import os
import tempfile
import tensorflow as tf
from absl import logging
from absl.testing import parameterized
from tensorflow_decision_forests.tensorflow.ops.inference import api as inference
from tensorflow_decision_forests.tensorflow.ops.inference import test_utils
from absl import flags
and any relevant context from other files:
# Path: tensorflow_decision_forests/tensorflow/ops/inference/api.py
# MISSING_NON_INTEGERIZED_CATEGORICAL_STORED_AS_INT = 0x7FFFFFFF - 2
# class Model(object):
# class ModelV2(tracking.AutoTrackable):
# class _InferenceArgsBuilder(tracking.AutoTrackable):
# class _AbstractModelLoader(six.with_metaclass(abc.ABCMeta, object)):
# class _CompiledSimpleMLModelResource(tracking.TrackableResource):
# class _DiskModelLoader(_AbstractModelLoader, tracking.AutoTrackable):
# def __init__(self,
# model_path: Text,
# tensor_model_path: Optional[Tensor] = None,
# verbose: Optional[bool] = True):
# def init_op(self) -> InitOp:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def __init__(self,
# model_path: Text,
# verbose: Optional[bool] = True,
# output_types: Optional[List[str]] = []):
# def apply_get_leaves(self, features: Dict[Text, Tensor]) -> Any:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def _create_model_identifier() -> Text:
# def __init__(self, verbose: Optional[bool] = True):
# def build_from_model_path(self, model_path: Text):
# def build_from_dataspec_and_header(self,
# dataspec: data_spec_pb2.DataSpecification,
# header: abstract_model_pb2.AbstractModel):
# def init_op(self) -> Tensor:
# def build_inference_op_args(
# self,
# features: Dict[Text, Tensor],
# output_leaves: Optional[bool] = False) -> Dict[Text, Any]:
# def _register_input_feature(self, name: Text, value: Tensor,
# feature_maps: FeatureMaps) -> None:
# def _create_str_to_int_tables(self):
# def _dict_to_list_sorted_by_key(src: Dict[Any, Any]) -> List[Any]:
# def _all_feature_idxs(feature_maps: FeatureMaps):
# def _check_all_input_features_are_provided(self, feature_maps):
# def _get_dense_output_dim(self):
# def _prepare_and_check_numerical_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_boolean_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_categorical_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def _prepare_and_check_categorical_set_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def initialize(self, model: "_CompiledSimpleMLModelResource") -> tf.Operation:
# def __init__(self, model_loader: _AbstractModelLoader):
# def _create_resource(self):
# def _initialize(self):
# def __init__(self, model_path, output_types: List[str]):
# def initialize(self, model: _CompiledSimpleMLModelResource) -> tf.Operation:
# def get_model_path(self) -> Tensor:
#
# Path: tensorflow_decision_forests/tensorflow/ops/inference/test_utils.py
# def build_toy_data_spec(add_boolean_features=False, has_catset=False):
# def build_toy_random_forest(path,
# winner_take_all_inference,
# add_boolean_features=False,
# has_catset=False,
# num_trees=2):
# def build_toy_gbdt(path, num_classes):
# def build_toy_input_features(use_rank_two=False, has_catset=False):
# def build_toy_input_feature_values(features,
# use_rank_two=False,
# has_catset=False):
# def shape(x):
# def __getitem__(self, key):
# def expected_toy_predictions_rf_weighted(add_boolean_features=False):
# def expected_toy_predictions_rf_wta(add_boolean_features=False,
# has_catset=False):
# def expected_toy_predictions_gbdt_binary():
# def expected_toy_predictions_gbdt_multiclass():
# class Identity:
. Output only the next line. | model.init_op() |
Given the code snippet: <|code_start|> features, has_catset=has_catset))
logging.info("dense_predictions_values: %s", dense_predictions_values)
logging.info("dense_col_representation_values: %s",
dense_col_representation_values)
expected_proba, expected_classes = test_utils.expected_toy_predictions_rf_wta(
add_boolean_features=add_boolean_features, has_catset=has_catset)
self.assertAllEqual(dense_col_representation_values, expected_classes)
self.assertAllClose(dense_predictions_values, expected_proba)
@parameterized.named_parameters(("base", False), ("boolean", True))
def test_toy_rf_classification_weighted(self, add_boolean_features):
with tf.Graph().as_default():
# Create toy model.
model_path = os.path.join(
tempfile.mkdtemp(dir=self.get_temp_dir()), "test_basic_rf_weighted")
test_utils.build_toy_random_forest(
model_path,
winner_take_all_inference=False,
add_boolean_features=add_boolean_features)
features = test_utils.build_toy_input_features()
# Prepare model.
model = inference.Model(model_path)
predictions = model.apply(features)
# Run model on toy dataset.
with self.session() as sess:
<|code_end|>
, generate the next line using the imports in this file:
import os
import tempfile
import tensorflow.compat.v1 as tf
from tensorflow_decision_forests.tensorflow.ops.inference import api as inference
from tensorflow_decision_forests.tensorflow.ops.inference import test_utils
from absl.testing import parameterized
from absl import flags
from absl import logging
and context (functions, classes, or occasionally code) from other files:
# Path: tensorflow_decision_forests/tensorflow/ops/inference/api.py
# MISSING_NON_INTEGERIZED_CATEGORICAL_STORED_AS_INT = 0x7FFFFFFF - 2
# class Model(object):
# class ModelV2(tracking.AutoTrackable):
# class _InferenceArgsBuilder(tracking.AutoTrackable):
# class _AbstractModelLoader(six.with_metaclass(abc.ABCMeta, object)):
# class _CompiledSimpleMLModelResource(tracking.TrackableResource):
# class _DiskModelLoader(_AbstractModelLoader, tracking.AutoTrackable):
# def __init__(self,
# model_path: Text,
# tensor_model_path: Optional[Tensor] = None,
# verbose: Optional[bool] = True):
# def init_op(self) -> InitOp:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def __init__(self,
# model_path: Text,
# verbose: Optional[bool] = True,
# output_types: Optional[List[str]] = []):
# def apply_get_leaves(self, features: Dict[Text, Tensor]) -> Any:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def _create_model_identifier() -> Text:
# def __init__(self, verbose: Optional[bool] = True):
# def build_from_model_path(self, model_path: Text):
# def build_from_dataspec_and_header(self,
# dataspec: data_spec_pb2.DataSpecification,
# header: abstract_model_pb2.AbstractModel):
# def init_op(self) -> Tensor:
# def build_inference_op_args(
# self,
# features: Dict[Text, Tensor],
# output_leaves: Optional[bool] = False) -> Dict[Text, Any]:
# def _register_input_feature(self, name: Text, value: Tensor,
# feature_maps: FeatureMaps) -> None:
# def _create_str_to_int_tables(self):
# def _dict_to_list_sorted_by_key(src: Dict[Any, Any]) -> List[Any]:
# def _all_feature_idxs(feature_maps: FeatureMaps):
# def _check_all_input_features_are_provided(self, feature_maps):
# def _get_dense_output_dim(self):
# def _prepare_and_check_numerical_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_boolean_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_categorical_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def _prepare_and_check_categorical_set_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def initialize(self, model: "_CompiledSimpleMLModelResource") -> tf.Operation:
# def __init__(self, model_loader: _AbstractModelLoader):
# def _create_resource(self):
# def _initialize(self):
# def __init__(self, model_path, output_types: List[str]):
# def initialize(self, model: _CompiledSimpleMLModelResource) -> tf.Operation:
# def get_model_path(self) -> Tensor:
#
# Path: tensorflow_decision_forests/tensorflow/ops/inference/test_utils.py
# def build_toy_data_spec(add_boolean_features=False, has_catset=False):
# def build_toy_random_forest(path,
# winner_take_all_inference,
# add_boolean_features=False,
# has_catset=False,
# num_trees=2):
# def build_toy_gbdt(path, num_classes):
# def build_toy_input_features(use_rank_two=False, has_catset=False):
# def build_toy_input_feature_values(features,
# use_rank_two=False,
# has_catset=False):
# def shape(x):
# def __getitem__(self, key):
# def expected_toy_predictions_rf_weighted(add_boolean_features=False):
# def expected_toy_predictions_rf_wta(add_boolean_features=False,
# has_catset=False):
# def expected_toy_predictions_gbdt_binary():
# def expected_toy_predictions_gbdt_multiclass():
# class Identity:
. Output only the next line. | sess.run(model.init_op()) |
Using the snippet: <|code_start|> @parameterized.named_parameters(("base", False), ("boolean", True))
def test_toy_rf_classification_weighted(self, add_boolean_features):
with tf.Graph().as_default():
# Create toy model.
model_path = os.path.join(
tempfile.mkdtemp(dir=self.get_temp_dir()), "test_basic_rf_weighted")
test_utils.build_toy_random_forest(
model_path,
winner_take_all_inference=False,
add_boolean_features=add_boolean_features)
features = test_utils.build_toy_input_features()
# Prepare model.
model = inference.Model(model_path)
predictions = model.apply(features)
# Run model on toy dataset.
with self.session() as sess:
sess.run(model.init_op())
dense_predictions_values, dense_col_representation_values = sess.run([
predictions.dense_predictions, predictions.dense_col_representation
], test_utils.build_toy_input_feature_values(features))
logging.info("dense_predictions_values: %s", dense_predictions_values)
logging.info("dense_col_representation_values: %s",
dense_col_representation_values)
expected_proba, expected_classes = test_utils.expected_toy_predictions_rf_weighted(
<|code_end|>
, determine the next line of code. You have imports:
import os
import tempfile
import tensorflow.compat.v1 as tf
from tensorflow_decision_forests.tensorflow.ops.inference import api as inference
from tensorflow_decision_forests.tensorflow.ops.inference import test_utils
from absl.testing import parameterized
from absl import flags
from absl import logging
and context (class names, function names, or code) available:
# Path: tensorflow_decision_forests/tensorflow/ops/inference/api.py
# MISSING_NON_INTEGERIZED_CATEGORICAL_STORED_AS_INT = 0x7FFFFFFF - 2
# class Model(object):
# class ModelV2(tracking.AutoTrackable):
# class _InferenceArgsBuilder(tracking.AutoTrackable):
# class _AbstractModelLoader(six.with_metaclass(abc.ABCMeta, object)):
# class _CompiledSimpleMLModelResource(tracking.TrackableResource):
# class _DiskModelLoader(_AbstractModelLoader, tracking.AutoTrackable):
# def __init__(self,
# model_path: Text,
# tensor_model_path: Optional[Tensor] = None,
# verbose: Optional[bool] = True):
# def init_op(self) -> InitOp:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def __init__(self,
# model_path: Text,
# verbose: Optional[bool] = True,
# output_types: Optional[List[str]] = []):
# def apply_get_leaves(self, features: Dict[Text, Tensor]) -> Any:
# def apply(self, features: Dict[Text, Tensor]) -> ModelOutput:
# def _create_model_identifier() -> Text:
# def __init__(self, verbose: Optional[bool] = True):
# def build_from_model_path(self, model_path: Text):
# def build_from_dataspec_and_header(self,
# dataspec: data_spec_pb2.DataSpecification,
# header: abstract_model_pb2.AbstractModel):
# def init_op(self) -> Tensor:
# def build_inference_op_args(
# self,
# features: Dict[Text, Tensor],
# output_leaves: Optional[bool] = False) -> Dict[Text, Any]:
# def _register_input_feature(self, name: Text, value: Tensor,
# feature_maps: FeatureMaps) -> None:
# def _create_str_to_int_tables(self):
# def _dict_to_list_sorted_by_key(src: Dict[Any, Any]) -> List[Any]:
# def _all_feature_idxs(feature_maps: FeatureMaps):
# def _check_all_input_features_are_provided(self, feature_maps):
# def _get_dense_output_dim(self):
# def _prepare_and_check_numerical_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_boolean_feature(self, name: Text, value: Tensor):
# def _prepare_and_check_categorical_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def _prepare_and_check_categorical_set_feature(
# self, name: Text, value: Tensor,
# feature_spec: data_spec_pb2.Column) -> Tensor:
# def initialize(self, model: "_CompiledSimpleMLModelResource") -> tf.Operation:
# def __init__(self, model_loader: _AbstractModelLoader):
# def _create_resource(self):
# def _initialize(self):
# def __init__(self, model_path, output_types: List[str]):
# def initialize(self, model: _CompiledSimpleMLModelResource) -> tf.Operation:
# def get_model_path(self) -> Tensor:
#
# Path: tensorflow_decision_forests/tensorflow/ops/inference/test_utils.py
# def build_toy_data_spec(add_boolean_features=False, has_catset=False):
# def build_toy_random_forest(path,
# winner_take_all_inference,
# add_boolean_features=False,
# has_catset=False,
# num_trees=2):
# def build_toy_gbdt(path, num_classes):
# def build_toy_input_features(use_rank_two=False, has_catset=False):
# def build_toy_input_feature_values(features,
# use_rank_two=False,
# has_catset=False):
# def shape(x):
# def __getitem__(self, key):
# def expected_toy_predictions_rf_weighted(add_boolean_features=False):
# def expected_toy_predictions_rf_wta(add_boolean_features=False,
# has_catset=False):
# def expected_toy_predictions_gbdt_binary():
# def expected_toy_predictions_gbdt_multiclass():
# class Identity:
. Output only the next line. | add_boolean_features=add_boolean_features) |
Here is a snippet: <|code_start|>
class ConditionTest(parameterized.TestCase, tf.test.TestCase):
def test_column_spec_bitmap_to_items_integer(self):
column_spec = data_spec_pb2.Column()
column_spec.categorical.number_of_unique_values = 10
column_spec.categorical.is_already_integerized = True
# b1100101101 => 32Dh
self.assertEqual(
condition_lib.column_spec_bitmap_to_items(column_spec, b"\x2D\x03"),
[0, 2, 3, 5, 8, 9])
def test_column_spec_bitmap_to_items_string(self):
column_spec = data_spec_pb2.Column()
column_spec.categorical.number_of_unique_values = 10
for i in range(10):
column_spec.categorical.items[f"item_{i}"].index = i
column_spec.categorical.is_already_integerized = False
# 1100101101b => 32Dh
self.assertEqual(
condition_lib.column_spec_bitmap_to_items(column_spec, b"\x2D\x03"),
["item_0", "item_2", "item_3", "item_5", "item_8", "item_9"])
def test_core_condition_to_condition_is_missing(self):
core_condition = decision_tree_pb2.NodeCondition()
core_condition.na_value = False
core_condition.attribute = 0
core_condition.condition.na_condition.SetInParent()
dataspec = data_spec_pb2.DataSpecification()
<|code_end|>
. Write the next line using the current file imports:
from absl.testing import parameterized
from tensorflow_decision_forests.component.py_tree import condition as condition_lib
from tensorflow_decision_forests.component.py_tree import dataspec as dataspec_lib
from yggdrasil_decision_forests.dataset import data_spec_pb2
from yggdrasil_decision_forests.model.decision_tree import decision_tree_pb2
import tensorflow as tf
and context from other files:
# Path: tensorflow_decision_forests/component/py_tree/condition.py
# class AbstractCondition(object):
# class IsMissingInCondition(AbstractCondition):
# class IsTrueCondition(AbstractCondition):
# class NumericalHigherThanCondition(AbstractCondition):
# class CategoricalIsInCondition(AbstractCondition):
# class CategoricalSetContainsCondition(AbstractCondition):
# class NumericalSparseObliqueCondition(AbstractCondition):
# def __init__(self, missing_evaluation: Optional[bool]):
# def missing_evaluation(self):
# def features(self) -> List[SimpleColumnSpec]:
# def __repr__(self):
# def __init__(self, feature: SimpleColumnSpec):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def __init__(self, feature: SimpleColumnSpec,
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def __init__(self, feature: SimpleColumnSpec, threshold: float,
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def threshold(self):
# def __init__(self, feature: SimpleColumnSpec, mask: Union[List[str],
# List[int]],
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def mask(self):
# def __init__(self, feature: SimpleColumnSpec, mask: Union[List[str],
# List[int]],
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def mask(self):
# def __init__(self, features: List[SimpleColumnSpec], weights: List[float],
# threshold: float, missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def weights(self):
# def threshold(self):
# def core_condition_to_condition(
# core_condition: decision_tree_pb2.NodeCondition,
# dataspec: data_spec_pb2.DataSpecification) -> AbstractCondition:
# def column_spec_bitmap_to_items(column_spec: data_spec_pb2.Column,
# bitmap: bytes) -> Union[List[int], List[str]]:
# def column_spec_items_to_bitmap(column_spec: data_spec_pb2.Column,
# items: List[int]) -> bytes:
# def set_core_node(condition: AbstractCondition,
# dataspec: data_spec_pb2.DataSpecification,
# core_node: decision_tree_pb2.Node):
#
# Path: tensorflow_decision_forests/component/py_tree/dataspec.py
# OUT_OF_DICTIONARY = "<OOD>"
# class SimpleColumnSpec(NamedTuple):
# def __repr__(self):
# def make_simple_column_spec(dataspec: data_spec_pb2.DataSpecification,
# col_idx: int) -> SimpleColumnSpec:
# def categorical_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def categorical_column_dictionary_to_list(
# column_spec: data_spec_pb2.Column) -> List[str]:
# def label_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def discretized_numerical_to_numerical(column_spec: data_spec_pb2.Column,
# value: int) -> float:
# def column_name_to_column_idx(name: str,
# dataspec: data_spec_pb2.DataSpecification) -> int:
, which may include functions, classes, or code. Output only the next line. | column_spec = dataspec.columns.add() |
Continue the code snippet: <|code_start|># You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class ConditionTest(parameterized.TestCase, tf.test.TestCase):
def test_column_spec_bitmap_to_items_integer(self):
column_spec = data_spec_pb2.Column()
column_spec.categorical.number_of_unique_values = 10
column_spec.categorical.is_already_integerized = True
# b1100101101 => 32Dh
self.assertEqual(
condition_lib.column_spec_bitmap_to_items(column_spec, b"\x2D\x03"),
[0, 2, 3, 5, 8, 9])
def test_column_spec_bitmap_to_items_string(self):
column_spec = data_spec_pb2.Column()
<|code_end|>
. Use current file imports:
from absl.testing import parameterized
from tensorflow_decision_forests.component.py_tree import condition as condition_lib
from tensorflow_decision_forests.component.py_tree import dataspec as dataspec_lib
from yggdrasil_decision_forests.dataset import data_spec_pb2
from yggdrasil_decision_forests.model.decision_tree import decision_tree_pb2
import tensorflow as tf
and context (classes, functions, or code) from other files:
# Path: tensorflow_decision_forests/component/py_tree/condition.py
# class AbstractCondition(object):
# class IsMissingInCondition(AbstractCondition):
# class IsTrueCondition(AbstractCondition):
# class NumericalHigherThanCondition(AbstractCondition):
# class CategoricalIsInCondition(AbstractCondition):
# class CategoricalSetContainsCondition(AbstractCondition):
# class NumericalSparseObliqueCondition(AbstractCondition):
# def __init__(self, missing_evaluation: Optional[bool]):
# def missing_evaluation(self):
# def features(self) -> List[SimpleColumnSpec]:
# def __repr__(self):
# def __init__(self, feature: SimpleColumnSpec):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def __init__(self, feature: SimpleColumnSpec,
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def __init__(self, feature: SimpleColumnSpec, threshold: float,
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def threshold(self):
# def __init__(self, feature: SimpleColumnSpec, mask: Union[List[str],
# List[int]],
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def mask(self):
# def __init__(self, feature: SimpleColumnSpec, mask: Union[List[str],
# List[int]],
# missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def feature(self):
# def mask(self):
# def __init__(self, features: List[SimpleColumnSpec], weights: List[float],
# threshold: float, missing_evaluation: Optional[bool]):
# def features(self):
# def __repr__(self):
# def __eq__(self, other):
# def weights(self):
# def threshold(self):
# def core_condition_to_condition(
# core_condition: decision_tree_pb2.NodeCondition,
# dataspec: data_spec_pb2.DataSpecification) -> AbstractCondition:
# def column_spec_bitmap_to_items(column_spec: data_spec_pb2.Column,
# bitmap: bytes) -> Union[List[int], List[str]]:
# def column_spec_items_to_bitmap(column_spec: data_spec_pb2.Column,
# items: List[int]) -> bytes:
# def set_core_node(condition: AbstractCondition,
# dataspec: data_spec_pb2.DataSpecification,
# core_node: decision_tree_pb2.Node):
#
# Path: tensorflow_decision_forests/component/py_tree/dataspec.py
# OUT_OF_DICTIONARY = "<OOD>"
# class SimpleColumnSpec(NamedTuple):
# def __repr__(self):
# def make_simple_column_spec(dataspec: data_spec_pb2.DataSpecification,
# col_idx: int) -> SimpleColumnSpec:
# def categorical_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def categorical_column_dictionary_to_list(
# column_spec: data_spec_pb2.Column) -> List[str]:
# def label_value_idx_to_value(column_spec: data_spec_pb2.Column,
# value_idx: int) -> Union[int, str]:
# def discretized_numerical_to_numerical(column_spec: data_spec_pb2.Column,
# value: int) -> float:
# def column_name_to_column_idx(name: str,
# dataspec: data_spec_pb2.DataSpecification) -> int:
. Output only the next line. | column_spec.categorical.number_of_unique_values = 10 |
Here is a snippet: <|code_start|># Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
ops = tf.load_op_library(resource_loader.get_path_to_datafile("inference.so"))
except Exception as e:
check_version.info_fail_to_load_custom_op(e, "inference.so")
raise e
# Importing all the symbols.
module = sys.modules[__name__]
<|code_end|>
. Write the next line using the current file imports:
from tensorflow_decision_forests.tensorflow import check_version
from tensorflow.python.platform import resource_loader
import tensorflow as tf
import sys
and context from other files:
# Path: tensorflow_decision_forests/tensorflow/check_version.py
# def check_version(tf_df_version,
# compatible_tf_versions,
# tf_version=None,
# external_logic=False):
# """Checks the compatibility of the TF version.
#
# Prints a warning message and return False in care of likely incompatible
# versions.
# """
#
# if not external_logic:
# pass
#
# if tf_version is None:
# tf_version = tf.__version__
# if tf_version not in compatible_tf_versions:
# logging.warning(
# "TensorFlow Decision Forests %s is compatible with the following "
# "TensorFlow Versions: %s. However, TensorFlow %s was detected. "
# "This can cause issues with the TF API and symbols in the custom C++ "
# "ops. See the TF and TF-DF compatibility table at "
# "https://github.com/tensorflow/decision-forests/blob/main/documentation/known_issues.md#compatibility-table.",
# tf_df_version, compatible_tf_versions, tf_version)
# return False
# return True
, which may include functions, classes, or code. Output only the next line. | for name, value in ops.__dict__.items(): |
Continue the code snippet: <|code_start|>
def prefetch_initial_objects(self):
objects = self \
.object_class().objects.filter(**self.initial_prefetch_dict) \
.exclude(id__in=CurrentObjectState.objects.filter(workflow=self).values_list("object_id",
flat=True)) if self.initial_prefetch else []
return objects
def __unicode__(self):
return self.name
def is_initial_transition_available(self, *, user, object_id, object_state_id=None, automatic=False):
if object_state_id:
return not CurrentObjectState.objects.filter(id=object_state_id, workflow=self).exists()
else:
last = CurrentObjectState.objects.filter(object_id=object_id, workflow=self).order_by('-id').first()
if last and last.state.active:
return False
elif self.initial_transition is not None:
conditions = self.initial_transition.condition_set.all()
if len(conditions) == 0:
if automatic:
return self.initial_transition.automatic
else:
return not self.initial_transition.automatic
else:
root_condition = conditions.first()
if root_condition.check_condition(user=user, object_id=object_id, object_state=last):
if automatic:
return self.initial_transition.automatic
<|code_end|>
. Use current file imports:
import threading
import json
from datetime import timedelta, datetime
from typing import Tuple, TypeVar
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.db.models import SET_NULL
from django.utils.timezone import now as django_now
from django.utils.translation import ugettext_lazy
from django.db.models.deletion import PROTECT
from django_workflow.utils import import_from, import_from_path
and context (classes, functions, or code) from other files:
# Path: django_workflow/utils.py
# def import_from(module, name):
# module = __import__(module, fromlist=[name])
# return getattr(module, name)
#
# def import_from_path(full_path):
# parts = full_path.rsplit(".", 1)
# return import_from(parts[0], parts[1])
. Output only the next line. | else: |
Given the code snippet: <|code_start|>
class Function(models.Model):
workflow = models.ForeignKey(Workflow, on_delete=PROTECT, verbose_name=ugettext_lazy("Workflow"),
editable=False)
function_name = models.CharField(max_length=200, verbose_name=ugettext_lazy("Function"))
function_module = models.CharField(max_length=400, verbose_name=ugettext_lazy("Module"))
condition = models.ForeignKey(Condition, on_delete=PROTECT, verbose_name=ugettext_lazy("Condition"))
def __unicode__(self):
return "{} - {}.{}".format(self.condition, self.function_module, self.function_name)
def save(self, **qwargs):
self.workflow = self.condition.workflow
super(Function, self).save(**qwargs)
@property
def function(self):
return import_from(self.function_module, self.function_name)
def clone(self, *, workflow: Workflow, **defaults) -> ('Function', 'Function'):
#old_function: Function
new_function, old_function = clone(self, workflow=workflow, **defaults)
for param in old_function.parameters.all():
new_param, old_param = param.clone(workflow=workflow, function=new_function)
return new_function, old_function
class FunctionParameter(models.Model):
workflow = models.ForeignKey(Workflow, on_delete=PROTECT, verbose_name=ugettext_lazy("Workflow"),
<|code_end|>
, generate the next line using the imports in this file:
import threading
import json
from datetime import timedelta, datetime
from typing import Tuple, TypeVar
from django.core.exceptions import ValidationError
from django.db import models, transaction
from django.db.models import SET_NULL
from django.utils.timezone import now as django_now
from django.utils.translation import ugettext_lazy
from django.db.models.deletion import PROTECT
from django_workflow.utils import import_from, import_from_path
and context (functions, classes, or occasionally code) from other files:
# Path: django_workflow/utils.py
# def import_from(module, name):
# module = __import__(module, fromlist=[name])
# return getattr(module, name)
#
# def import_from_path(full_path):
# parts = full_path.rsplit(".", 1)
# return import_from(parts[0], parts[1])
. Output only the next line. | editable=False) |
Next line prediction: <|code_start|># import a definition from a module at runtime
def object_attribute_value(*, workflow, object_id, user, object_state, **kwargs):
params = parse_parameters(workflow=workflow, object_id=object_id, user=user, object_state=object_state, **kwargs)
if "attribute_name" in params:
attribute_name = params.pop('attribute_name')
object = workflow.object_class().objects.get(id=object_id)
attribute = getattr(object, attribute_name)
if "attribute_value" in params:
attribute_value = params.pop('attribute_value')
return attribute == attribute_value
raise ValueError("missing parameter attribute_name or attribute_value")
def user_attribute_value(*, workflow, object_id, user, object_state, **kwargs):
params = parse_parameters(workflow=workflow, object_id=object_id, user=user, object_state=object_state, **kwargs)
if "attribute_name" in params:
attribute_name = params.pop('attribute_name')
attribute = getattr(user, attribute_name)
if "attribute_value" in params:
attribute_value = params.pop('attribute_value')
return attribute == attribute_value
raise ValueError("missing parameter attribute_name or attribute_value")
def object_attribute_filter_exist(*, workflow, object_id, user, object_state, **kwargs):
params = parse_parameters(workflow=workflow, object_id=object_id, user=user, object_state=object_state, **kwargs)
if "attribute_name" in params:
<|code_end|>
. Use current file imports:
(import ast
import numbers
from django.db.models.manager import Manager
from django_workflow.utils import import_from, import_from_path)
and context including class names, function names, or small code snippets from other files:
# Path: django_workflow/utils.py
# def import_from(module, name):
# module = __import__(module, fromlist=[name])
# return getattr(module, name)
#
# def import_from_path(full_path):
# parts = full_path.rsplit(".", 1)
# return import_from(parts[0], parts[1])
. Output only the next line. | attribute_name = params.pop('attribute_name') |
Given the code snippet: <|code_start|># coding: utf-8
log = log_manager.get_logger(__name__)
RAW_DATA_INPUT_SOURCE = 0xFFFFFFFF
VOLTAGE_EXCITATION = 10322
CURRENT_EXCITATION = 10134
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import numpy.polynomial.polynomial as poly
import re
import nptdms.thermocouples as thermocouples
from nptdms.log import log_manager
and context (functions, classes, or occasionally code) from other files:
# Path: nptdms/log.py
# class LogManager(object):
# def __init__(self):
# def get_logger(self, module_name):
# def set_level(self, level):
. Output only the next line. | class NoOpScaling(object): |
Given the following code snippet before the placeholder: <|code_start|> thermocouple = thermocouples.Thermocouple(
forward_polynomials=[
thermocouples.Polynomial(
applicable_range=thermocouples.Range(None, 10),
coefficients=[0.0, 1.0]),
thermocouples.Polynomial(
applicable_range=thermocouples.Range(10, 20),
coefficients=[1.0, 2.0]),
thermocouples.Polynomial(
applicable_range=thermocouples.Range(20, None),
coefficients=[2.0, 3.0]),
],
inverse_polynomials=[]
)
voltages = thermocouple.celsius_to_mv(np.array([0.0, 9.0, 10.0, 11.0, 19.0, 20.0, 21.0]))
np.testing.assert_almost_equal(voltages, np.array([0.0, 9.0, 21.0, 23.0, 39.0, 62.0, 65.0]))
def test_scale_voltage_to_temperature():
thermocouple = thermocouples.Thermocouple(
forward_polynomials=[],
inverse_polynomials=[
thermocouples.Polynomial(
applicable_range=thermocouples.Range(None, 10),
coefficients=[0.0, 1.0]),
thermocouples.Polynomial(
applicable_range=thermocouples.Range(10, 20),
coefficients=[1.0, 2.0]),
thermocouples.Polynomial(
applicable_range=thermocouples.Range(20, None),
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
import pytest
import thermocouples_reference
from hypothesis import (given, strategies, settings)
from nptdms import thermocouples
and context including class names, function names, and sometimes code from other files:
# Path: nptdms/thermocouples.py
# class Thermocouple(object):
# class Polynomial(object):
# class Range(object):
# def __init__(self, forward_polynomials, inverse_polynomials, exponential_term=None):
# def celsius_to_mv(self, temperature):
# def mv_to_celsius(self, voltage):
# def __init__(self, applicable_range, coefficients):
# def within_range(self, value):
# def apply(self, x):
# def __init__(self, start, end):
# def within_range(self, value):
# def _verify_contiguous(polynomials):
. Output only the next line. | coefficients=[2.0, 3.0]), |
Continue the code snippet: <|code_start|>
sys.path = ['..'] + sys.path
class OpenMock:
def __init__(self, *args, **kw):
self.writes = []
def read(self):
return """LABEL core
<|code_end|>
. Use current file imports:
from functools import reduce
from unittest.mock import MagicMock, patch, sentinel
from scripts import config
from scripts import gen
from scripts import grub
from scripts import iso
import os
import sys
and context (classes, functions, or code) from other files:
# Path: scripts/config.py
# def update_usb_mount(new_usb_details):
# def add_remounted(usb_disk):
#
# Path: scripts/gen.py
# def scripts_dir_path():
# def print_version():
# def quote(text):
# def is_quoted(text):
# def has_digit(word):
# def sys_64bits():
# def iso_cfg_ext_dir():
# def clean_iso_cfg_ext_dir(iso_cfg_ext_dir):
# def copy_mbusb_dir_usb(usb_disk):
# def read_input_yes():
# def strings(filename, _min=4):
# def size_not_enough(iso_link, usb_disk):
# def mbusb_version():
# def check_text_in_file(file_path, text):
# def prepare_mbusb_host_dir():
# def grub_efi_exist(grub_efi_path):
# def process_exist(process_name):
# def write_to_file(filepath, text):
# def __init__(self):
# def windowsRam(self):
# def linuxRam(self):
# class MemoryCheck():
# class MEMORYSTATUS(ctypes.Structure):
#
# Path: scripts/grub.py
# def mbusb_update_grub_cfg():
# def write_custom_grub_cfg(install_dir, loopback_cfg_path):
# def grub_custom_menu(mbus_grub_cfg_path, distro):
# def grub_raw_iso(mbus_grub_cfg_path):
# def write_to_file(file_path, _strings):
# def locate_kernel_file(subpath, isolinux_dir):
# def tweak_bootfile_path(img_file_spec, isolinux_dir):
# def extract_initrd_params_and_fix_kernel(value, isolinux_dir):
# def iso2grub2(install_dir, loopback_cfg_path):
#
# Path: scripts/iso.py
# def iso_name(iso_link):
# def iso_basename(iso_link):
# def isolinux_bin_exist(iso_link):
# def iso_size(iso_link):
# def is_readable(iso_link):
# def is_bootable(iso_link):
# def isolinux_bin_dir(iso_link):
# def isolinux_bin_path(iso_link):
# def iso_menu_lst_path(iso_link):
# def integrity(iso_link):
# def iso_file_list(iso_link):
# def isolinux_version(isolinux_bin_path):
# def iso_extract_file(iso_link, dest_dir, _filter):
# def extract_cfg_file(iso_link):
# def iso_extract_full(iso_link, dest_dir):
# def iso_file_path(iso_link, file_name):
# def get_file_list(iso_link, predicate):
. Output only the next line. | MENU LABEL SliTaz core Live |
Predict the next line for this snippet: <|code_start|>
log = logging.getLogger(__name__)
class ConfigWriter(Config):
def __init__(self):
super(ConfigWriter, self).__init__()
<|code_end|>
with the help of current file imports:
from copilot.models.config import Config
import logging
and context from other files:
# Path: copilot/models/config.py
# class Config(object):
# """Base config writer for plugins.
#
# Your plugin will have different configuration file needs than any other
# program. To handle this we have created a Python based "configuration
# writer" class that can be customized to write all manner of configuration
# files.
#
# Guidance for extending and modifying this config object for plugins can be
# found in the CoPilot wiki.
# https://github.com/OpenInternet/co-pilot/wiki/Plugin-Guide#minimal-configuration-example
# """
#
# def __init__(self):
# self._rules = []
# self.config_dir = "main"
#
# @property
# def config_type(self):
# """Returns the config file type."""
# try:
# return self._config_type
# except AttributeError as err:
# log.debug("Config type is not yet set, returning empty string.")
# return ""
#
# @config_type.setter
# def config_type(self, config_type):
# """Sets the type of the config and the location of the config file.
#
# Args:
# type (str): The 'type' of a configuration is an abstraction
# of the directory that the config file is within.
# """
#
# try:
# config_file = get_config_path(config_type)
# log.debug("config file {0} found".format(config_file))
# except ValueError as err:
# log.error("An invalid config type was passed. Please check \"get_config_path\" in the models/config.py scripts for the valid types of config files.")
# raise ValueError(err)
# log.debug("setting config type.")
# self._config_type = config_type
# log.debug("setting config file {0}.".format(config_file))
# self.config_file = config_file
#
# def check_file(self):
# """Checks if the config file exists."""
#
# if os.path.exists(self.config_file):
# return True
# else:
# return False
#
# def delete_config(self):
# """Deletes the plugin config files if they exists."""
# if os.path.exists(self.config_file):
# os.remove(self.config_file)
#
# def add_rule(self, rule):
# """ check, transform, and add a single rule.
#
# This function takes a single rule, checks if that rule is valid, transforms and formats the rule, and adds that rule to the self._rules list in a way that can be processed by the writer.
#
# Args:
# rule (list): A list containing the action, target, and sub-target
# of a rule as three strings.
# - action, target, sub_target = rule[0], rule[1], rule[2]
# """
#
# log.debug("adding rule {0}".format(rule))
# self._rules.append(rule)
#
# def write(self):
# """ Writes the specified config file.
#
# Opens, and clears the specified config file and then writes the header
# and then all of the rules for this config.
# """
#
# self.prepare()
# log.debug("Opening config file {0} for writing.".format(self.config_file))
# with open(self.config_file, 'w+') as config_file:
# self.write_header(config_file)
# for rule in self._rules:
# self.write_rule(config_file, rule)
#
# def prepare(self):
# """Create a config directory if it does not exist."""
#
# log.info("Creating the config directory " +
# "{0} if it does not exist.".format(self.config_dir))
# _dir = get_config_dir(self.config_dir)
# if not os.path.exists(_dir):
# log.info("Creating the main config directory {0}.".format(_dir))
# os.makedirs(_dir)
# else:
# log.info("The config directory {0} exists and will not be created.".format(_dir))
#
# def write_rule(self, config_file, rule):
# """ Write a single rule within a config file.
# Args:
# config_file: A file handler for a config file to write to.
# rule: A string that should be written to the file.
# """
#
# log.debug("writing rule {0}".format(rule))
# config_file.write(rule)
#
# def write_header(self, config_file):
# """ Write a header at the top of a config file.
# Args:
# config_file: A file handler for a config file to write to.
# """
#
# log.debug("writing header info {0}".format(self.header))
# if self.header:
# log.debug("Found header. Writing to config file {0}".format(config_file))
# config_file.write(self.header)
# else:
# log.debug("No header found.")
, which may contain function names, class names, or code. Output only the next line. | log.info("example config loaded.") |
Continue the code snippet: <|code_start|># http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestCapabilities(base.BaseTestCase):
def test_recursive_keypairs(self):
data = {'a': 'A', 'b': 'B',
'nested': {'a': 'A', 'b': 'B'}}
pairs = list(capabilities._recursive_keypairs(data))
self.assertEqual([('a', 'A'), ('b', 'B'),
('nested:a', 'A'), ('nested:b', 'B')],
pairs)
def test_recursive_keypairs_with_separator(self):
data = {'a': 'A',
'b': 'B',
'nested': {'a': 'A',
'b': 'B',
},
}
separator = '.'
pairs = list(capabilities._recursive_keypairs(data, separator))
self.assertEqual([('a', 'A'),
<|code_end|>
. Use current file imports:
from oslotest import base
from aodh.api.controllers.v2 import capabilities
and context (classes, functions, or code) from other files:
# Path: aodh/api/controllers/v2/capabilities.py
# def _decode_unicode(input):
# def _recursive_keypairs(d, separator=':'):
# def _flatten_capabilities(capabilities):
# def sample(cls):
# def get(self):
# class Capabilities(base.Base):
# class CapabilitiesController(rest.RestController):
. Output only the next line. | ('b', 'B'), |
Predict the next line after this snippet: <|code_start|> help='Number of workers for notifier service. '
'default value is 1.')
]
LISTENER_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
help='Number of workers for listener service. '
'default value is 1.')
]
def prepare_service(argv=None, config_files=None):
conf = cfg.ConfigOpts()
oslo_i18n.enable_lazy()
log.register_options(conf)
log_levels = (
conf.default_log_levels +
[
'futurist=INFO',
'keystoneclient=INFO',
'oslo_db.sqlalchemy=WARN',
'cotyledon=INFO'
]
)
log.set_defaults(default_log_levels=log_levels)
defaults.set_cors_middleware_defaults()
db_options.set_defaults(conf)
if profiler_opts:
<|code_end|>
using the current file's imports:
import os
import oslo_i18n
from keystoneauth1 import loading as ka_loading
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from aodh.conf import defaults
from aodh import keystone_client
from aodh import messaging
from aodh import profiler
from aodh import version
from aodh import opts
and any relevant context from other files:
# Path: aodh/conf/defaults.py
# def set_lib_defaults():
# def set_cors_middleware_defaults():
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
#
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/profiler.py
# LOG = log.getLogger(__name__)
# class WsgiMiddleware(object):
# def __init__(self, application, **kwargs):
# def factory(cls, global_conf, **local_conf):
# def filter_(app):
# def __call__(self, request):
# def setup(conf):
# def trace_cls(name, **kwargs):
# def decorator(cls):
#
# Path: aodh/version.py
. Output only the next line. | profiler_opts.set_defaults(conf) |
Based on the snippet: <|code_start|># under the License.
profiler_opts = importutils.try_import('osprofiler.opts')
OPTS = [
cfg.IntOpt('http_timeout',
default=600,
help='Timeout seconds for HTTP requests. Set it to None to '
'disable timeout.'),
cfg.IntOpt('evaluation_interval',
default=60,
help='Period of evaluation cycle, should'
' be >= than configured pipeline interval for'
' collection of underlying meters.'),
]
EVALUATOR_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
help='Number of workers for evaluator service. '
'default value is 1.')
]
NOTIFIER_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import oslo_i18n
from keystoneauth1 import loading as ka_loading
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from aodh.conf import defaults
from aodh import keystone_client
from aodh import messaging
from aodh import profiler
from aodh import version
from aodh import opts
and context (classes, functions, sometimes code) from other files:
# Path: aodh/conf/defaults.py
# def set_lib_defaults():
# def set_cors_middleware_defaults():
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
#
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/profiler.py
# LOG = log.getLogger(__name__)
# class WsgiMiddleware(object):
# def __init__(self, application, **kwargs):
# def factory(cls, global_conf, **local_conf):
# def filter_(app):
# def __call__(self, request):
# def setup(conf):
# def trace_cls(name, **kwargs):
# def decorator(cls):
#
# Path: aodh/version.py
. Output only the next line. | help='Number of workers for notifier service. ' |
Given the code snippet: <|code_start|>]
EVALUATOR_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
help='Number of workers for evaluator service. '
'default value is 1.')
]
NOTIFIER_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
help='Number of workers for notifier service. '
'default value is 1.')
]
LISTENER_OPTS = [
cfg.IntOpt('workers',
default=1,
min=1,
help='Number of workers for listener service. '
'default value is 1.')
]
def prepare_service(argv=None, config_files=None):
conf = cfg.ConfigOpts()
oslo_i18n.enable_lazy()
<|code_end|>
, generate the next line using the imports in this file:
import os
import oslo_i18n
from keystoneauth1 import loading as ka_loading
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from aodh.conf import defaults
from aodh import keystone_client
from aodh import messaging
from aodh import profiler
from aodh import version
from aodh import opts
and context (functions, classes, or occasionally code) from other files:
# Path: aodh/conf/defaults.py
# def set_lib_defaults():
# def set_cors_middleware_defaults():
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
#
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/profiler.py
# LOG = log.getLogger(__name__)
# class WsgiMiddleware(object):
# def __init__(self, application, **kwargs):
# def factory(cls, global_conf, **local_conf):
# def filter_(app):
# def __call__(self, request):
# def setup(conf):
# def trace_cls(name, **kwargs):
# def decorator(cls):
#
# Path: aodh/version.py
. Output only the next line. | log.register_options(conf) |
Given the code snippet: <|code_start|>#!/usr/bin/env python
#
# Copyright 2013-2017 Red Hat, Inc
# Copyright 2012-2015 eNovance <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
profiler_opts = importutils.try_import('osprofiler.opts')
OPTS = [
cfg.IntOpt('http_timeout',
default=600,
<|code_end|>
, generate the next line using the imports in this file:
import os
import oslo_i18n
from keystoneauth1 import loading as ka_loading
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from aodh.conf import defaults
from aodh import keystone_client
from aodh import messaging
from aodh import profiler
from aodh import version
from aodh import opts
and context (functions, classes, or occasionally code) from other files:
# Path: aodh/conf/defaults.py
# def set_lib_defaults():
# def set_cors_middleware_defaults():
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
#
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/profiler.py
# LOG = log.getLogger(__name__)
# class WsgiMiddleware(object):
# def __init__(self, application, **kwargs):
# def factory(cls, global_conf, **local_conf):
# def filter_(app):
# def __call__(self, request):
# def setup(conf):
# def trace_cls(name, **kwargs):
# def decorator(cls):
#
# Path: aodh/version.py
. Output only the next line. | help='Timeout seconds for HTTP requests. Set it to None to ' |
Using the snippet: <|code_start|>#!/usr/bin/env python
#
# Copyright 2013-2017 Red Hat, Inc
# Copyright 2012-2015 eNovance <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
profiler_opts = importutils.try_import('osprofiler.opts')
OPTS = [
cfg.IntOpt('http_timeout',
default=600,
help='Timeout seconds for HTTP requests. Set it to None to '
'disable timeout.'),
<|code_end|>
, determine the next line of code. You have imports:
import os
import oslo_i18n
from keystoneauth1 import loading as ka_loading
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_policy import opts as policy_opts
from oslo_reports import guru_meditation_report as gmr
from oslo_utils import importutils
from aodh.conf import defaults
from aodh import keystone_client
from aodh import messaging
from aodh import profiler
from aodh import version
from aodh import opts
and context (class names, function names, or code) available:
# Path: aodh/conf/defaults.py
# def set_lib_defaults():
# def set_cors_middleware_defaults():
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
#
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/profiler.py
# LOG = log.getLogger(__name__)
# class WsgiMiddleware(object):
# def __init__(self, application, **kwargs):
# def factory(cls, global_conf, **local_conf):
# def filter_(app):
# def __call__(self, request):
# def setup(conf):
# def trace_cls(name, **kwargs):
# def decorator(cls):
#
# Path: aodh/version.py
. Output only the next line. | cfg.IntOpt('evaluation_interval', |
Using the snippet: <|code_start|># Copyright (c) 2018 NEC, Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestUpgradeChecks(base.BaseTestCase):
def setUp(self):
super(TestUpgradeChecks, self).setUp()
self.cmd = status.Checks()
cfg.CONF(args=[], project='aodh')
def test_checks(self):
for name, func in self.cmd._upgrade_checks:
if isinstance(func, tuple):
func_name, kwargs = func
result = func_name(self, **kwargs)
<|code_end|>
, determine the next line of code. You have imports:
from oslo_config import cfg
from oslo_upgradecheck.upgradecheck import Code
from aodh.cmd import status
from aodh.tests import base
and context (class names, function names, or code) available:
# Path: aodh/cmd/status.py
# CONF = cfg.CONF
# class Checks(upgradecheck.UpgradeCommands):
# def main():
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
. Output only the next line. | else: |
Predict the next line for this snippet: <|code_start|> cfg.StrOpt('notifier_topic',
default='alarming',
help='The topic that aodh uses for alarm notifier '
'messages.'),
]
LOG = log.getLogger(__name__)
class AlarmNotifier(object):
def __init__(self, conf):
self.notifier = oslo_messaging.Notifier(
messaging.get_transport(conf),
driver='messagingv2',
publisher_id="alarming.evaluator",
topics=[conf.notifier_topic])
def notify(self, alarm, previous, reason, reason_data):
actions = getattr(alarm, models.Alarm.ALARM_ACTIONS_MAP[alarm.state])
if not actions:
LOG.debug('alarm %(alarm_id)s has no action configured '
'for state transition from %(previous)s to '
'state %(state)s, skipping the notification.',
{'alarm_id': alarm.alarm_id,
'previous': previous,
'state': alarm.state})
return
payload = {'actions': actions,
'alarm_id': alarm.alarm_id,
'alarm_name': alarm.name,
<|code_end|>
with the help of current file imports:
from oslo_config import cfg
from oslo_log import log
from aodh import messaging
from aodh.storage import models
import oslo_messaging
and context from other files:
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
#
# Path: aodh/storage/models.py
# class Alarm(base.Model):
# class AlarmChange(base.Model):
# class Quota(base.Model):
# ALARM_INSUFFICIENT_DATA = 'insufficient data'
# ALARM_OK = 'ok'
# ALARM_ALARM = 'alarm'
# ALARM_ACTIONS_MAP = {
# ALARM_INSUFFICIENT_DATA: 'insufficient_data_actions',
# ALARM_OK: 'ok_actions',
# ALARM_ALARM: 'alarm_actions',
# }
# ALARM_LEVEL_LOW = 'low'
# ALARM_LEVEL_MODERATE = 'moderate'
# ALARM_LEVEL_CRITICAL = 'critical'
# SUPPORT_SORT_KEYS = (
# 'alarm_id', 'enabled', 'name', 'type', 'severity', 'timestamp',
# 'user_id', 'project_id', 'state', 'repeat_actions', 'state_timestamp')
# DEFAULT_SORT = [('timestamp', 'desc')]
# PRIMARY_KEY = 'alarm_id'
# CREATION = 'creation'
# RULE_CHANGE = 'rule change'
# STATE_TRANSITION = 'state transition'
# DELETION = 'deletion'
# SUPPORT_SORT_KEYS = (
# 'event_id', 'alarm_id', 'on_behalf_of', 'project_id', 'user_id',
# 'type', 'timestamp', 'severity')
# DEFAULT_SORT = [('timestamp', 'desc')]
# PRIMARY_KEY = 'event_id'
# def __init__(self, alarm_id, type, enabled, name, description,
# timestamp, user_id, project_id, state, state_timestamp,
# state_reason, ok_actions, alarm_actions,
# insufficient_data_actions, repeat_actions, rule,
# time_constraints, severity=None, evaluate_timestamp=None):
# def __init__(self,
# event_id,
# alarm_id,
# type,
# detail,
# user_id,
# project_id,
# on_behalf_of,
# severity=None,
# timestamp=None
# ):
# def __init__(self, project_id, resource, limit):
, which may contain function names, class names, or code. Output only the next line. | 'severity': alarm.severity, |
Predict the next line for this snippet: <|code_start|> current, reason, reason_data)
mock_heatclient.assert_called_once_with(self.conf, "fake_trust_id")
mock_client.resources.mark_unhealthy.assert_called_once_with(
"fake_asg_id",
"fake_resource_name",
True,
"unhealthy load balancer member"
)
mock_client.stacks.update.assert_called_once_with(
"fake_stack_id", existing=True
)
@mock.patch("aodh.keystone_client.get_heat_client_from_trust")
def test_notify_stack_id_missing(self, mock_heatclient):
action = netutils.urlsplit("trust+autohealer://fake_trust_id:delete@")
alarm_id = "fake_alarm_id"
alarm_name = "fake_alarm_name"
severity = "low"
previous = "ok"
current = "alarm"
reason = "no good reason"
reason_data = {
"asg_id": "fake_asg_id",
"unhealthy_members": [
{"tags": ["3bd8bc5a-7632-11e9-84cd-00224d6b7bc1"]}
]
}
notifier = heat_notifier.TrustHeatAlarmNotifier(self.conf)
<|code_end|>
with the help of current file imports:
from unittest import mock
from oslo_utils import netutils
from aodh.notifier import heat as heat_notifier
from aodh.tests.unit.notifier import base
and context from other files:
# Path: aodh/notifier/heat.py
# LOG = log.getLogger(__name__)
# class TrustHeatAlarmNotifier(notifier.AlarmNotifier):
# def __init__(self, conf):
# def notify(self, action, alarm_id, alarm_name, severity, previous, current,
# reason, reason_data):
#
# Path: aodh/tests/unit/notifier/base.py
# class TestNotifierBase(base.BaseTestCase):
# def setUp(self):
, which may contain function names, class names, or code. Output only the next line. | notifier.notify(action, alarm_id, alarm_name, severity, previous, |
Here is a snippet: <|code_start|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class TestTrustHeatAlarmNotifier(base.TestNotifierBase):
@mock.patch("aodh.keystone_client.get_heat_client_from_trust")
def test_notify(self, mock_heatclient):
action = netutils.urlsplit("trust+autohealer://fake_trust_id:delete@")
alarm_id = "fake_alarm_id"
alarm_name = "fake_alarm_name"
severity = "low"
previous = "ok"
current = "alarm"
reason = "no good reason"
reason_data = {
"stack_id": "fake_stack_id",
"asg_id": "fake_asg_id",
"unhealthy_members": [
{"id": "3bd8bc5a-7632-11e9-84cd-00224d6b7bc1"}
]
}
class FakeResource(object):
<|code_end|>
. Write the next line using the current file imports:
from unittest import mock
from oslo_utils import netutils
from aodh.notifier import heat as heat_notifier
from aodh.tests.unit.notifier import base
and context from other files:
# Path: aodh/notifier/heat.py
# LOG = log.getLogger(__name__)
# class TrustHeatAlarmNotifier(notifier.AlarmNotifier):
# def __init__(self, conf):
# def notify(self, action, alarm_id, alarm_name, severity, previous, current,
# reason, reason_data):
#
# Path: aodh/tests/unit/notifier/base.py
# class TestNotifierBase(base.BaseTestCase):
# def setUp(self):
, which may include functions, classes, or code. Output only the next line. | def __init__(self, resource_name): |
Next line prediction: <|code_start|> super(AggregationMetricByResourcesLookupRule,
cls).validate_alarm(alarm)
rule = alarm.gnocchi_aggregation_by_resources_threshold_rule
# check the query string is a valid json
try:
query = json.loads(rule.query)
except ValueError:
raise wsme.exc.InvalidInput('rule/query', rule.query)
conf = pecan.request.cfg
# Scope the alarm to the project id if needed
auth_project = v2_utils.get_auth_project(alarm.project_id)
if auth_project:
perms_filter = {"=": {"created_by_project_id": auth_project}}
external_project_owner = cls.get_external_project_owner()
if external_project_owner:
perms_filter = {"or": [
perms_filter,
{"and": [
{"=": {"created_by_project_id":
external_project_owner}},
{"=": {"project_id": auth_project}}]}
]}
query = {"and": [perms_filter, query]}
<|code_end|>
. Use current file imports:
(import json
import threading
import cachetools
import pecan
import wsme
from gnocchiclient import client
from gnocchiclient import exceptions
from keystoneauth1 import exceptions as ka_exceptions
from oslo_config import cfg
from oslo_log import log
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.api.controllers.v2 import utils as v2_utils
from aodh import keystone_client)
and context including class names, function names, or small code snippets from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/controllers/v2/utils.py
# def get_auth_project(on_behalf_of=None):
# def sanitize_query(query, db_func, on_behalf_of=None):
# def _verify_query_segregation(query, auth_project=None):
# def validate_query(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _is_field_metadata(field):
# def _validate_timestamp_fields(query, field_name, operator_list,
# allow_timestamps):
# def query_to_kwargs(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _get_query_timestamps(args=None):
# def _parse_timestamp(timestamp):
# def set_resp_location_hdr(location):
# def get_pagination_options(sort, limit, marker, api_model):
# def get_query_value(queries, field, type=None):
# def is_field_exist(queries, field):
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
. Output only the next line. | rule.query = json.dumps(query) |
Using the snippet: <|code_start|>
class MetricOfResourceRule(AlarmGnocchiThresholdRule):
metric = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the metric"
resource_id = wsme.wsattr(wtypes.text, mandatory=True)
"The id of a resource"
resource_type = wsme.wsattr(wtypes.text, mandatory=True)
"The resource type"
def as_dict(self):
rule = self.as_dict_from_keys(['granularity', 'comparison_operator',
'threshold', 'aggregation_method',
'evaluation_periods',
'metric',
'resource_id',
'resource_type'])
return rule
class AggregationMetricByResourcesLookupRule(AlarmGnocchiThresholdRule):
metric = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the metric"
query = wsme.wsattr(wtypes.text, mandatory=True)
('The query to filter the metric, Don\'t forget to filter out '
'deleted resources (example: {"and": [{"=": {"ended_at": null}}, ...]}), '
'Otherwise Gnocchi will try to create the aggregate against obsolete '
<|code_end|>
, determine the next line of code. You have imports:
import json
import threading
import cachetools
import pecan
import wsme
from gnocchiclient import client
from gnocchiclient import exceptions
from keystoneauth1 import exceptions as ka_exceptions
from oslo_config import cfg
from oslo_log import log
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.api.controllers.v2 import utils as v2_utils
from aodh import keystone_client
and context (class names, function names, or code) available:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/controllers/v2/utils.py
# def get_auth_project(on_behalf_of=None):
# def sanitize_query(query, db_func, on_behalf_of=None):
# def _verify_query_segregation(query, auth_project=None):
# def validate_query(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _is_field_metadata(field):
# def _validate_timestamp_fields(query, field_name, operator_list,
# allow_timestamps):
# def query_to_kwargs(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _get_query_timestamps(args=None):
# def _parse_timestamp(timestamp):
# def set_resp_location_hdr(location):
# def get_pagination_options(sort, limit, marker, api_model):
# def get_query_value(queries, field, type=None):
# def is_field_exist(queries, field):
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
. Output only the next line. | 'resources') |
Next line prediction: <|code_start|> raise GnocchiUnavailable(e)
class MetricOfResourceRule(AlarmGnocchiThresholdRule):
metric = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the metric"
resource_id = wsme.wsattr(wtypes.text, mandatory=True)
"The id of a resource"
resource_type = wsme.wsattr(wtypes.text, mandatory=True)
"The resource type"
def as_dict(self):
rule = self.as_dict_from_keys(['granularity', 'comparison_operator',
'threshold', 'aggregation_method',
'evaluation_periods',
'metric',
'resource_id',
'resource_type'])
return rule
class AggregationMetricByResourcesLookupRule(AlarmGnocchiThresholdRule):
metric = wsme.wsattr(wtypes.text, mandatory=True)
"The name of the metric"
query = wsme.wsattr(wtypes.text, mandatory=True)
('The query to filter the metric, Don\'t forget to filter out '
'deleted resources (example: {"and": [{"=": {"ended_at": null}}, ...]}), '
<|code_end|>
. Use current file imports:
(import json
import threading
import cachetools
import pecan
import wsme
from gnocchiclient import client
from gnocchiclient import exceptions
from keystoneauth1 import exceptions as ka_exceptions
from oslo_config import cfg
from oslo_log import log
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.api.controllers.v2 import utils as v2_utils
from aodh import keystone_client)
and context including class names, function names, or small code snippets from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/controllers/v2/utils.py
# def get_auth_project(on_behalf_of=None):
# def sanitize_query(query, db_func, on_behalf_of=None):
# def _verify_query_segregation(query, auth_project=None):
# def validate_query(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _is_field_metadata(field):
# def _validate_timestamp_fields(query, field_name, operator_list,
# allow_timestamps):
# def query_to_kwargs(query, db_func, internal_keys=None,
# allow_timestamps=True):
# def _get_query_timestamps(args=None):
# def _parse_timestamp(timestamp):
# def set_resp_location_hdr(location):
# def get_pagination_options(sort, limit, marker, api_model):
# def get_query_value(queries, field, type=None):
# def is_field_exist(queries, field):
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
. Output only the next line. | 'Otherwise Gnocchi will try to create the aggregate against obsolete ' |
Here is a snippet: <|code_start|> "--config-file=%s" % self.tempfile])
self.assertEqual(0, subp.wait())
def test_run_expirer_ttl_disabled(self):
subp = subprocess.Popen(['aodh-expirer',
'-d',
"--config-file=%s" % self.tempfile],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, __ = subp.communicate()
self.assertEqual(0, subp.poll())
self.assertIn(b"Nothing to clean, database alarm history "
b"time to live is disabled", out)
def test_run_expirer_ttl_enabled(self):
content = ("[database]\n"
"alarm_history_time_to_live=1\n"
"alarm_histories_delete_batch_size=10\n"
"connection=log://localhost\n")
content = content.encode('utf-8')
self.tempfile = fileutils.write_to_tempfile(content=content,
prefix='aodh',
suffix='.conf')
subp = subprocess.Popen(['aodh-expirer',
'-d',
"--config-file=%s" % self.tempfile],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, __ = subp.communicate()
self.assertEqual(0, subp.poll())
<|code_end|>
. Write the next line using the current file imports:
import os
import subprocess
from oslo_utils import fileutils
from aodh.tests import base
and context from other files:
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
, which may include functions, classes, or code. Output only the next line. | msg = "Dropping alarm history 10 data with TTL 1" |
Given the following code snippet before the placeholder: <|code_start|>#
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ABCSkip(base.SkipNotImplementedMeta, abc.ABCMeta):
pass
class ModelsMigrationsSync(tests_db.TestBase,
test_migrations.ModelsMigrationsSync,
<|code_end|>
, predict the next line using imports from the current file:
import abc
from unittest import mock
from oslo_db.sqlalchemy import test_migrations
from aodh.storage.sqlalchemy import models
from aodh.tests import base
from aodh.tests.functional import db as tests_db
and context including class names, function names, and sometimes code from other files:
# Path: aodh/storage/sqlalchemy/models.py
# class JSONEncodedDict(TypeDecorator):
# class TimestampUTC(TypeDecorator):
# class AodhBase(object):
# class Alarm(Base):
# class AlarmChange(Base):
# class Quota(Base):
# def process_bind_param(value, dialect):
# def process_result_value(value, dialect):
# def load_dialect_impl(self, dialect):
# def __setitem__(self, key, value):
# def __getitem__(self, key):
# def update(self, values):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
#
# Path: aodh/tests/functional/db.py
# class SQLManager(fixtures.Fixture):
# class PgSQLManager(SQLManager):
# class MySQLManager(SQLManager):
# class SQLiteManager(fixtures.Fixture):
# class TestBase(test_base.BaseTestCase,
# metaclass=test_base.SkipNotImplementedMeta):
# def __init__(self, conf):
# def _create_db(conn, db_name):
# def _create_db(conn, db_name):
# def __init__(self, conf):
# def setUp(self):
# def tearDown(self):
# def _get_connection(self, conf):
# DRIVER_MANAGERS = {
# 'mysql': MySQLManager,
# 'postgresql': PgSQLManager,
# 'sqlite': SQLiteManager,
# }
. Output only the next line. | metaclass=ABCSkip): |
Continue the code snippet: <|code_start|>#
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ABCSkip(base.SkipNotImplementedMeta, abc.ABCMeta):
pass
class ModelsMigrationsSync(tests_db.TestBase,
test_migrations.ModelsMigrationsSync,
metaclass=ABCSkip):
def setUp(self):
<|code_end|>
. Use current file imports:
import abc
from unittest import mock
from oslo_db.sqlalchemy import test_migrations
from aodh.storage.sqlalchemy import models
from aodh.tests import base
from aodh.tests.functional import db as tests_db
and context (classes, functions, or code) from other files:
# Path: aodh/storage/sqlalchemy/models.py
# class JSONEncodedDict(TypeDecorator):
# class TimestampUTC(TypeDecorator):
# class AodhBase(object):
# class Alarm(Base):
# class AlarmChange(Base):
# class Quota(Base):
# def process_bind_param(value, dialect):
# def process_result_value(value, dialect):
# def load_dialect_impl(self, dialect):
# def __setitem__(self, key, value):
# def __getitem__(self, key):
# def update(self, values):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
#
# Path: aodh/tests/functional/db.py
# class SQLManager(fixtures.Fixture):
# class PgSQLManager(SQLManager):
# class MySQLManager(SQLManager):
# class SQLiteManager(fixtures.Fixture):
# class TestBase(test_base.BaseTestCase,
# metaclass=test_base.SkipNotImplementedMeta):
# def __init__(self, conf):
# def _create_db(conn, db_name):
# def _create_db(conn, db_name):
# def __init__(self, conf):
# def setUp(self):
# def tearDown(self):
# def _get_connection(self, conf):
# DRIVER_MANAGERS = {
# 'mysql': MySQLManager,
# 'postgresql': PgSQLManager,
# 'sqlite': SQLiteManager,
# }
. Output only the next line. | super(ModelsMigrationsSync, self).setUp() |
Next line prediction: <|code_start|>#
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ABCSkip(base.SkipNotImplementedMeta, abc.ABCMeta):
pass
class ModelsMigrationsSync(tests_db.TestBase,
test_migrations.ModelsMigrationsSync,
metaclass=ABCSkip):
def setUp(self):
super(ModelsMigrationsSync, self).setUp()
self.db = mock.Mock()
<|code_end|>
. Use current file imports:
(import abc
from unittest import mock
from oslo_db.sqlalchemy import test_migrations
from aodh.storage.sqlalchemy import models
from aodh.tests import base
from aodh.tests.functional import db as tests_db)
and context including class names, function names, or small code snippets from other files:
# Path: aodh/storage/sqlalchemy/models.py
# class JSONEncodedDict(TypeDecorator):
# class TimestampUTC(TypeDecorator):
# class AodhBase(object):
# class Alarm(Base):
# class AlarmChange(Base):
# class Quota(Base):
# def process_bind_param(value, dialect):
# def process_result_value(value, dialect):
# def load_dialect_impl(self, dialect):
# def __setitem__(self, key, value):
# def __getitem__(self, key):
# def update(self, values):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
#
# Path: aodh/tests/functional/db.py
# class SQLManager(fixtures.Fixture):
# class PgSQLManager(SQLManager):
# class MySQLManager(SQLManager):
# class SQLiteManager(fixtures.Fixture):
# class TestBase(test_base.BaseTestCase,
# metaclass=test_base.SkipNotImplementedMeta):
# def __init__(self, conf):
# def _create_db(conn, db_name):
# def _create_db(conn, db_name):
# def __init__(self, conf):
# def setUp(self):
# def tearDown(self):
# def _get_connection(self, conf):
# DRIVER_MANAGERS = {
# 'mysql': MySQLManager,
# 'postgresql': PgSQLManager,
# 'sqlite': SQLiteManager,
# }
. Output only the next line. | @staticmethod |
Predict the next line for this snippet: <|code_start|># Copyright 2019 Catalyst Cloud Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class LoadBalancerMemberHealthRule(base.AlarmRule):
pool_id = wsme.wsattr(wtypes.text, mandatory=True)
"ID of a load balancer pool the member belongs to."
stack_id = wsme.wsattr(wtypes.text, mandatory=True)
"ID of a Heat stack which contains the load balancer member."
autoscaling_group_id = wsme.wsattr(wtypes.text, mandatory=True)
<|code_end|>
with the help of current file imports:
import wsme
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
and context from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
, which may contain function names, class names, or code. Output only the next line. | "ID of a Heat autoscaling group that contains the load balancer member." |
Given the following code snippet before the placeholder: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
operation_kind = ('lt', 'le', 'eq', 'ne', 'ge', 'gt')
operation_kind_enum = wtypes.Enum(str, *operation_kind)
class ClientSideError(wsme.exc.ClientSideError):
def __init__(self, error, status_code=400):
pecan.response.translatable_error = error
super(ClientSideError, self).__init__(error, status_code)
class ProjectNotAuthorized(ClientSideError):
def __init__(self, id, aspect='project'):
params = dict(aspect=aspect, id=id)
<|code_end|>
, predict the next line using imports from the current file:
import ast
import datetime
import functools
import pecan
import wsme
from oslo_utils import strutils
from oslo_utils import timeutils
from wsme import types as wtypes
from aodh.i18n import _
from aodh.utils import get_func_valid_keys
and context including class names, function names, and sometimes code from other files:
# Path: aodh/i18n.py
# DOMAIN = 'aodh'
# def translate(value, user_locale):
# def get_available_languages():
#
# Path: aodh/utils.py
# def get_func_valid_keys(func):
# return inspect.getfullargspec(func)[0]
. Output only the next line. | super(ProjectNotAuthorized, self).__init__( |
Continue the code snippet: <|code_start|># Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
operation_kind = ('lt', 'le', 'eq', 'ne', 'ge', 'gt')
operation_kind_enum = wtypes.Enum(str, *operation_kind)
class ClientSideError(wsme.exc.ClientSideError):
def __init__(self, error, status_code=400):
pecan.response.translatable_error = error
super(ClientSideError, self).__init__(error, status_code)
class ProjectNotAuthorized(ClientSideError):
def __init__(self, id, aspect='project'):
<|code_end|>
. Use current file imports:
import ast
import datetime
import functools
import pecan
import wsme
from oslo_utils import strutils
from oslo_utils import timeutils
from wsme import types as wtypes
from aodh.i18n import _
from aodh.utils import get_func_valid_keys
and context (classes, functions, or code) from other files:
# Path: aodh/i18n.py
# DOMAIN = 'aodh'
# def translate(value, user_locale):
# def get_available_languages():
#
# Path: aodh/utils.py
# def get_func_valid_keys(func):
# return inspect.getfullargspec(func)[0]
. Output only the next line. | params = dict(aspect=aspect, id=id) |
Given the following code snippet before the placeholder: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class InvalidCompositeRule(base.ClientSideError):
def __init__(self, error):
err = _('Invalid input composite rule: %s, it should '
<|code_end|>
, predict the next line using imports from the current file:
import json
from stevedore import named
from wsme.rest import json as wjson
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.i18n import _
and context including class names, function names, and sometimes code from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/i18n.py
# DOMAIN = 'aodh'
# def translate(value, user_locale):
# def get_available_languages():
. Output only the next line. | 'be a dict with an "and" or "or" as key, and the ' |
Based on the snippet: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class InvalidCompositeRule(base.ClientSideError):
def __init__(self, error):
err = _('Invalid input composite rule: %s, it should '
'be a dict with an "and" or "or" as key, and the '
<|code_end|>
, predict the immediate next line with the help of imports:
import json
from stevedore import named
from wsme.rest import json as wjson
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.i18n import _
and context (classes, functions, sometimes code) from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/i18n.py
# DOMAIN = 'aodh'
# def translate(value, user_locale):
# def get_available_languages():
. Output only the next line. | 'value of dict should be a list of basic threshold ' |
Predict the next line after this snippet: <|code_start|># Copyright 2020 Catalyst Cloud LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOG = log.getLogger(__name__)
ALLOWED_RESOURCES = ('alarms',)
class Quota(base.Base):
resource = wtypes.wsattr(wtypes.Enum(str, *ALLOWED_RESOURCES),
mandatory=True)
<|code_end|>
using the current file's imports:
from oslo_log import log
from pecan import rest
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.api import rbac
import pecan
import wsme
import wsmeext.pecan as wsme_pecan
and any relevant context from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/rbac.py
# def target_from_segregation_rule(headers, enforcer):
# def enforce(policy_name, headers, enforcer, target):
# def get_limited_to(headers, enforcer):
# def get_limited_to_project(headers, enforcer):
# def is_admin(headers):
. Output only the next line. | limit = wsme.wsattr(wtypes.IntegerType(minimum=-1), mandatory=True) |
Using the snippet: <|code_start|># Copyright 2020 Catalyst Cloud LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOG = log.getLogger(__name__)
ALLOWED_RESOURCES = ('alarms',)
class Quota(base.Base):
resource = wtypes.wsattr(wtypes.Enum(str, *ALLOWED_RESOURCES),
mandatory=True)
limit = wsme.wsattr(wtypes.IntegerType(minimum=-1), mandatory=True)
class Quotas(base.Base):
<|code_end|>
, determine the next line of code. You have imports:
from oslo_log import log
from pecan import rest
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.api import rbac
import pecan
import wsme
import wsmeext.pecan as wsme_pecan
and context (class names, function names, or code) available:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/rbac.py
# def target_from_segregation_rule(headers, enforcer):
# def enforce(policy_name, headers, enforcer, target):
# def get_limited_to(headers, enforcer):
# def get_limited_to_project(headers, enforcer):
# def is_admin(headers):
. Output only the next line. | project_id = wsme.wsattr(wtypes.text, mandatory=True) |
Using the snippet: <|code_start|> t1 = messaging.get_transport(self.CONF, 'fake://')
t2 = messaging.get_transport(self.CONF, 'fake://')
self.assertEqual(t1, t2)
def test_get_transport_default_url_caching(self):
t1 = messaging.get_transport(self.CONF, )
t2 = messaging.get_transport(self.CONF, )
self.assertEqual(t1, t2)
def test_get_transport_default_url_no_caching(self):
t1 = messaging.get_transport(self.CONF, cache=False)
t2 = messaging.get_transport(self.CONF, cache=False)
self.assertNotEqual(t1, t2)
def test_get_transport_url_no_caching(self):
t1 = messaging.get_transport(self.CONF, 'fake://', cache=False)
t2 = messaging.get_transport(self.CONF, 'fake://', cache=False)
self.assertNotEqual(t1, t2)
def test_get_transport_default_url_caching_mix(self):
t1 = messaging.get_transport(self.CONF, )
t2 = messaging.get_transport(self.CONF, cache=False)
self.assertNotEqual(t1, t2)
def test_get_transport_url_caching_mix(self):
t1 = messaging.get_transport(self.CONF, 'fake://')
t2 = messaging.get_transport(self.CONF, 'fake://', cache=False)
self.assertNotEqual(t1, t2)
def test_get_transport_optional(self):
<|code_end|>
, determine the next line of code. You have imports:
from oslo_config import fixture as fixture_config
from oslotest import base
from aodh import messaging
import oslo_messaging.conffixture
and context (class names, function names, or code) available:
# Path: aodh/messaging.py
# DEFAULT_URL = "__default__"
# TRANSPORTS = {}
# _SERIALIZER = oslo_serializer.JsonPayloadSerializer()
# def setup():
# def get_transport(conf, url=None, optional=False, cache=True):
# def get_batch_notification_listener(transport, targets, endpoints,
# allow_requeue=False,
# batch_size=1, batch_timeout=None):
# def get_notifier(transport, publisher_id):
. Output only the next line. | self.CONF.set_override('transport_url', 'non-url') |
Continue the code snippet: <|code_start|># Copyright 2015 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
LOG = log.getLogger(__name__)
# The list of points that Gnocchi API returned is composed
# of tuples with (timestamp, granularity, value)
GRANULARITY = 1
VALUE = 2
class GnocchiBase(threshold.ThresholdEvaluator):
def __init__(self, conf):
super(GnocchiBase, self).__init__(conf)
self._gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
adapter_options={
<|code_end|>
. Use current file imports:
import json
from gnocchiclient import client
from gnocchiclient import exceptions
from oslo_log import log
from aodh.evaluator import threshold
from aodh import keystone_client
and context (classes, functions, or code) from other files:
# Path: aodh/evaluator/threshold.py
# LOG = log.getLogger(__name__)
# COMPARATORS = {
# 'gt': operator.gt,
# 'lt': operator.lt,
# 'ge': operator.ge,
# 'le': operator.le,
# 'eq': operator.eq,
# 'ne': operator.ne,
# }
# OPTS = [
# cfg.IntOpt('additional_ingestion_lag',
# min=0,
# default=0,
# help='The number of seconds to extend the evaluation windows '
# 'to compensate the reporting/ingestion lag.')
# ]
# class InsufficientDataError(Exception):
# class ThresholdEvaluator(evaluator.Evaluator):
# def __init__(self, reason, statistics):
# def _bound_duration(self, rule):
# def _reason_data(disposition, count, most_recent):
# def _reason(cls, alarm, statistics, state, count):
# def evaluate_rule(self, alarm_rule):
# def _compare(value):
# def _transition_alarm(self, alarm, state, trending_state, statistics,
# outside_count, unknown_reason):
# def evaluate(self, alarm):
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
. Output only the next line. | 'interface': conf.service_credentials.interface, |
Predict the next line for this snippet: <|code_start|>#
# Copyright 2015 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
LOG = log.getLogger(__name__)
# The list of points that Gnocchi API returned is composed
# of tuples with (timestamp, granularity, value)
GRANULARITY = 1
VALUE = 2
class GnocchiBase(threshold.ThresholdEvaluator):
def __init__(self, conf):
super(GnocchiBase, self).__init__(conf)
self._gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
<|code_end|>
with the help of current file imports:
import json
from gnocchiclient import client
from gnocchiclient import exceptions
from oslo_log import log
from aodh.evaluator import threshold
from aodh import keystone_client
and context from other files:
# Path: aodh/evaluator/threshold.py
# LOG = log.getLogger(__name__)
# COMPARATORS = {
# 'gt': operator.gt,
# 'lt': operator.lt,
# 'ge': operator.ge,
# 'le': operator.le,
# 'eq': operator.eq,
# 'ne': operator.ne,
# }
# OPTS = [
# cfg.IntOpt('additional_ingestion_lag',
# min=0,
# default=0,
# help='The number of seconds to extend the evaluation windows '
# 'to compensate the reporting/ingestion lag.')
# ]
# class InsufficientDataError(Exception):
# class ThresholdEvaluator(evaluator.Evaluator):
# def __init__(self, reason, statistics):
# def _bound_duration(self, rule):
# def _reason_data(disposition, count, most_recent):
# def _reason(cls, alarm, statistics, state, count):
# def evaluate_rule(self, alarm_rule):
# def _compare(value):
# def _transition_alarm(self, alarm, state, trending_state, statistics,
# outside_count, unknown_reason):
# def evaluate(self, alarm):
#
# Path: aodh/keystone_client.py
# CFG_GROUP = "service_credentials"
# OPTS = [
# cfg.StrOpt('region-name',
# default=os.environ.get('OS_REGION_NAME'),
# deprecated_name="os-region-name",
# help='Region name to use for OpenStack service endpoints.'),
# cfg.StrOpt('interface',
# default=os.environ.get(
# 'OS_INTERFACE', os.environ.get('OS_ENDPOINT_TYPE',
# 'public')),
# deprecated_name="os-endpoint-type",
# choices=('public', 'internal', 'admin', 'auth', 'publicURL',
# 'internalURL', 'adminURL'),
# help='Type of endpoint in Identity service catalog to use for '
# 'communication with OpenStack services.'),
# ]
# def get_session(conf):
# def get_client(conf):
# def get_trusted_client(conf, trust_id):
# def get_auth_token(client):
# def get_client_on_behalf_user(conf, auth_plugin):
# def create_trust_id(conf, trustor_user_id, trustor_project_id, roles,
# auth_plugin):
# def delete_trust_id(conf, trust_id, auth_plugin):
# def url_for(conf, **kwargs):
# def get_heat_client_from_trust(conf, trust_id):
# def register_keystoneauth_opts(conf):
, which may contain function names, class names, or code. Output only the next line. | adapter_options={ |
Here is a snippet: <|code_start|>#
# Copyright 2015 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Schema validation for the event type query.
_q_validator = voluptuous.Schema(
{"field": voluptuous.Match(r"^[a-zA-Z.',0-9_-]*$"),
"op": voluptuous.In(base.operation_kind),
<|code_end|>
. Write the next line using the current file imports:
import voluptuous
import wsme
from wsme import types as wtypes
from aodh.api.controllers.v2 import base
from aodh.i18n import _
and context from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/i18n.py
# DOMAIN = 'aodh'
# def translate(value, user_locale):
# def get_available_languages():
, which may include functions, classes, or code. Output only the next line. | "value": voluptuous.In(["string", "integer", "float", "boolean", ""])}) |
Predict the next line after this snippet: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def get_auth_project(on_behalf_of=None):
# when an alarm is created by an admin on behalf of another tenant
# we must ensure for:
# - threshold alarm, that an implicit query constraint on project_id is
# added so that admin-level visibility on statistics is not leaked
# Hence, for null auth_project (indicating admin-ness) we check if
# the creating tenant differs from the tenant on whose behalf the
# alarm is being created
auth_project = rbac.get_limited_to_project(pecan.request.headers,
pecan.request.enforcer)
created_by = pecan.request.headers.get('X-Project-Id')
is_admin = auth_project is None
if is_admin and on_behalf_of != created_by:
<|code_end|>
using the current file's imports:
import copy
import datetime
import pecan
import wsme
from oslo_utils import timeutils
from urllib import parse as urllib_parse
from aodh.api.controllers.v2 import base
from aodh.api import rbac
from aodh.utils import get_func_valid_keys
and any relevant context from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/rbac.py
# def target_from_segregation_rule(headers, enforcer):
# def enforce(policy_name, headers, enforcer, target):
# def get_limited_to(headers, enforcer):
# def get_limited_to_project(headers, enforcer):
# def is_admin(headers):
#
# Path: aodh/utils.py
# def get_func_valid_keys(func):
# return inspect.getfullargspec(func)[0]
. Output only the next line. | auth_project = on_behalf_of |
Given the code snippet: <|code_start|># not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def get_auth_project(on_behalf_of=None):
# when an alarm is created by an admin on behalf of another tenant
# we must ensure for:
# - threshold alarm, that an implicit query constraint on project_id is
# added so that admin-level visibility on statistics is not leaked
# Hence, for null auth_project (indicating admin-ness) we check if
# the creating tenant differs from the tenant on whose behalf the
# alarm is being created
auth_project = rbac.get_limited_to_project(pecan.request.headers,
pecan.request.enforcer)
created_by = pecan.request.headers.get('X-Project-Id')
is_admin = auth_project is None
if is_admin and on_behalf_of != created_by:
auth_project = on_behalf_of
<|code_end|>
, generate the next line using the imports in this file:
import copy
import datetime
import pecan
import wsme
from oslo_utils import timeutils
from urllib import parse as urllib_parse
from aodh.api.controllers.v2 import base
from aodh.api import rbac
from aodh.utils import get_func_valid_keys
and context (functions, classes, or occasionally code) from other files:
# Path: aodh/api/controllers/v2/base.py
# class ClientSideError(wsme.exc.ClientSideError):
# class ProjectNotAuthorized(ClientSideError):
# class AdvEnum(wtypes.wsproperty):
# class Base(wtypes.DynamicBase):
# class Query(Base):
# class AlarmNotFound(ClientSideError):
# class AlarmRule(Base):
# def __init__(self, error, status_code=400):
# def __init__(self, id, aspect='project'):
# def __init__(self, name, *args, **kwargs):
# def _get(self, parent):
# def _set(self, parent, value):
# def from_db_model(cls, m):
# def from_db_and_links(cls, m, links):
# def as_dict(self, db_model):
# def as_dict_from_keys(self, keys):
# def to_dict(self):
# def get_op(self):
# def set_op(self, value):
# def __repr__(self):
# def sample(cls):
# def as_dict(self):
# def get_value(self, forced_type=None):
# def __init__(self, alarm, auth_project):
# def validate_alarm(alarm):
# def create_hook(alarm):
# def update_hook(alarm):
#
# Path: aodh/api/rbac.py
# def target_from_segregation_rule(headers, enforcer):
# def enforce(policy_name, headers, enforcer, target):
# def get_limited_to(headers, enforcer):
# def get_limited_to_project(headers, enforcer):
# def is_admin(headers):
#
# Path: aodh/utils.py
# def get_func_valid_keys(func):
# return inspect.getfullargspec(func)[0]
. Output only the next line. | return auth_project |
Next line prediction: <|code_start|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestEventAlarmEvaluationService(tests_base.BaseTestCase):
def setUp(self):
super(TestEventAlarmEvaluationService, self).setUp()
conf = service.prepare_service(argv=[], config_files=[])
self.CONF = self.useFixture(fixture_config.Config(conf)).conf
self.CONF.set_override("batch_size", 2, 'listener')
self.setup_messaging(self.CONF)
@mock.patch('aodh.storage.get_connection_from_config',
mock.MagicMock())
@mock.patch('aodh.event.EventAlarmEndpoint.sample')
def test_batch_event_listener(self, mocked):
msg_notifier = oslo_messaging.Notifier(
self.transport, topics=['alarm.all'], driver='messaging',
publisher_id='test-publisher')
<|code_end|>
. Use current file imports:
(import time
import oslo_messaging
from unittest import mock
from oslo_config import fixture as fixture_config
from aodh import event
from aodh import service
from aodh.tests import base as tests_base)
and context including class names, function names, or small code snippets from other files:
# Path: aodh/event.py
# LOG = log.getLogger(__name__)
# OPTS = [
# cfg.StrOpt('event_alarm_topic',
# default='alarm.all',
# deprecated_group='DEFAULT',
# help='The topic that aodh uses for event alarm evaluation.'),
# cfg.IntOpt('batch_size',
# default=1,
# help='Number of notification messages to wait before '
# 'dispatching them.'),
# cfg.IntOpt('batch_timeout',
# help='Number of seconds to wait before dispatching samples '
# 'when batch_size is not reached (None means indefinitely).'),
# ]
# class EventAlarmEndpoint(object):
# class EventAlarmEvaluationService(cotyledon.Service):
# def __init__(self, evaluator):
# def sample(self, notifications):
# def __init__(self, worker_id, conf):
# def terminate(self):
#
# Path: aodh/service.py
# OPTS = [
# cfg.IntOpt('http_timeout',
# default=600,
# help='Timeout seconds for HTTP requests. Set it to None to '
# 'disable timeout.'),
# cfg.IntOpt('evaluation_interval',
# default=60,
# help='Period of evaluation cycle, should'
# ' be >= than configured pipeline interval for'
# ' collection of underlying meters.'),
# ]
# EVALUATOR_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for evaluator service. '
# 'default value is 1.')
# ]
# NOTIFIER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for notifier service. '
# 'default value is 1.')
# ]
# LISTENER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for listener service. '
# 'default value is 1.')
# ]
# def prepare_service(argv=None, config_files=None):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
. Output only the next line. | received_events = [] |
Given the code snippet: <|code_start|>#
# Copyright 2015 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class TestEventAlarmEvaluationService(tests_base.BaseTestCase):
def setUp(self):
super(TestEventAlarmEvaluationService, self).setUp()
conf = service.prepare_service(argv=[], config_files=[])
self.CONF = self.useFixture(fixture_config.Config(conf)).conf
self.CONF.set_override("batch_size", 2, 'listener')
self.setup_messaging(self.CONF)
<|code_end|>
, generate the next line using the imports in this file:
import time
import oslo_messaging
from unittest import mock
from oslo_config import fixture as fixture_config
from aodh import event
from aodh import service
from aodh.tests import base as tests_base
and context (functions, classes, or occasionally code) from other files:
# Path: aodh/event.py
# LOG = log.getLogger(__name__)
# OPTS = [
# cfg.StrOpt('event_alarm_topic',
# default='alarm.all',
# deprecated_group='DEFAULT',
# help='The topic that aodh uses for event alarm evaluation.'),
# cfg.IntOpt('batch_size',
# default=1,
# help='Number of notification messages to wait before '
# 'dispatching them.'),
# cfg.IntOpt('batch_timeout',
# help='Number of seconds to wait before dispatching samples '
# 'when batch_size is not reached (None means indefinitely).'),
# ]
# class EventAlarmEndpoint(object):
# class EventAlarmEvaluationService(cotyledon.Service):
# def __init__(self, evaluator):
# def sample(self, notifications):
# def __init__(self, worker_id, conf):
# def terminate(self):
#
# Path: aodh/service.py
# OPTS = [
# cfg.IntOpt('http_timeout',
# default=600,
# help='Timeout seconds for HTTP requests. Set it to None to '
# 'disable timeout.'),
# cfg.IntOpt('evaluation_interval',
# default=60,
# help='Period of evaluation cycle, should'
# ' be >= than configured pipeline interval for'
# ' collection of underlying meters.'),
# ]
# EVALUATOR_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for evaluator service. '
# 'default value is 1.')
# ]
# NOTIFIER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for notifier service. '
# 'default value is 1.')
# ]
# LISTENER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for listener service. '
# 'default value is 1.')
# ]
# def prepare_service(argv=None, config_files=None):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
. Output only the next line. | @mock.patch('aodh.storage.get_connection_from_config', |
Next line prediction: <|code_start|>
# consistency
num_nodes += 1
nodes.append(str(num_nodes + 1))
hr = coordination.HashRing(nodes)
for k in range(num_keys):
n = int(hr.get_node(str(k)))
assignments[k] -= n
reassigned = len([c for c in assignments if c != 0])
self.assertLess(reassigned, num_keys / num_nodes)
class TestPartitioning(base.BaseTestCase):
def setUp(self):
super(TestPartitioning, self).setUp()
conf = service.prepare_service(argv=[], config_files=[])
self.CONF = self.useFixture(fixture_config.Config(conf)).conf
self.shared_storage = {}
def _get_new_started_coordinator(self, shared_storage, agent_id=None,
coordinator_cls=None):
coordinator_cls = coordinator_cls or MockToozCoordinator
self.CONF.set_override('backend_url', 'xxx://yyy',
group='coordination')
with mock.patch('tooz.coordination.get_coordinator',
lambda _, member_id:
coordinator_cls(member_id, shared_storage)):
pc = coordination.PartitionCoordinator(self.CONF, agent_id)
pc.start()
<|code_end|>
. Use current file imports:
(from unittest import mock
from oslo_config import fixture as fixture_config
from aodh import coordination
from aodh import service
from aodh.tests import base
import tooz.coordination)
and context including class names, function names, or small code snippets from other files:
# Path: aodh/coordination.py
# LOG = log.getLogger(__name__)
# OPTS = [
# cfg.StrOpt('backend_url',
# help='The backend URL to use for distributed coordination. If '
# 'left empty, alarm evaluation won\'t do workload '
# 'partitioning and will only function correctly if a '
# 'single instance of the service is running.'),
# cfg.FloatOpt('heartbeat_interval',
# default=1.0,
# deprecated_name='heartbeat',
# help='Number of seconds between heartbeats for distributed '
# 'coordination.'),
# cfg.FloatOpt('check_watchers',
# default=10.0,
# deprecated_for_removal=True,
# deprecated_reason='This parameter is no longer used.',
# help='Number of seconds between checks to see if group '
# 'membership has changed'),
# cfg.IntOpt('retry_backoff',
# default=1,
# help='Retry backoff factor when retrying to connect with'
# ' coordination backend'),
# cfg.IntOpt('max_retry_interval',
# default=30,
# help='Maximum number of seconds between retry to join '
# 'partitioning group')
# ]
# class ErrorJoiningPartitioningGroup(Exception):
# class MemberNotInGroupError(Exception):
# class HashRing(object):
# class PartitionCoordinator(object):
# def __init__(self):
# def __init__(self, group_id, members, my_id):
# def __init__(self, nodes, replicas=100):
# def _hash(key):
# def _get_position_on_ring(self, key):
# def get_node(self, key):
# def __init__(self, conf, my_id=None):
# def start(self):
# def stop(self):
# def is_active(self):
# def heartbeat(self):
# def join_group(self, group_id):
# def _inner():
# def leave_group(self, group_id):
# def _get_members(self, group_id):
# def extract_my_subset(self, group_id, universal_set):
#
# Path: aodh/service.py
# OPTS = [
# cfg.IntOpt('http_timeout',
# default=600,
# help='Timeout seconds for HTTP requests. Set it to None to '
# 'disable timeout.'),
# cfg.IntOpt('evaluation_interval',
# default=60,
# help='Period of evaluation cycle, should'
# ' be >= than configured pipeline interval for'
# ' collection of underlying meters.'),
# ]
# EVALUATOR_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for evaluator service. '
# 'default value is 1.')
# ]
# NOTIFIER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for notifier service. '
# 'default value is 1.')
# ]
# LISTENER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for listener service. '
# 'default value is 1.')
# ]
# def prepare_service(argv=None, config_files=None):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
. Output only the next line. | return pc |
Given the code snippet: <|code_start|> def test_reconnect(self, mock_info, mocked_exception):
coord = self._get_new_started_coordinator({}, 'a',
MockToozCoordExceptionRaiser)
with mock.patch('tooz.coordination.get_coordinator',
return_value=MockToozCoordExceptionRaiser('a', {})):
coord.heartbeat()
called = [mock.call(u'Error connecting to coordination backend.'),
mock.call(u'Error connecting to coordination backend.'),
mock.call(u'Error sending a heartbeat to coordination '
u'backend.')]
self.assertEqual(called, mocked_exception.call_args_list)
with mock.patch('tooz.coordination.get_coordinator',
return_value=MockToozCoordinator('a', {})):
coord.heartbeat()
mock_info.assert_called_with(u'Coordination backend started '
u'successfully.')
def test_group_id_none(self):
coord = self._get_new_started_coordinator({}, 'a')
self.assertTrue(coord._coordinator.is_started)
with mock.patch.object(coord._coordinator, 'join_group') as mocked:
coord.join_group(None)
self.assertEqual(0, mocked.call_count)
with mock.patch.object(coord._coordinator, 'leave_group') as mocked:
coord.leave_group(None)
self.assertEqual(0, mocked.call_count)
def test_stop(self):
coord = self._get_new_started_coordinator({}, 'a')
<|code_end|>
, generate the next line using the imports in this file:
from unittest import mock
from oslo_config import fixture as fixture_config
from aodh import coordination
from aodh import service
from aodh.tests import base
import tooz.coordination
and context (functions, classes, or occasionally code) from other files:
# Path: aodh/coordination.py
# LOG = log.getLogger(__name__)
# OPTS = [
# cfg.StrOpt('backend_url',
# help='The backend URL to use for distributed coordination. If '
# 'left empty, alarm evaluation won\'t do workload '
# 'partitioning and will only function correctly if a '
# 'single instance of the service is running.'),
# cfg.FloatOpt('heartbeat_interval',
# default=1.0,
# deprecated_name='heartbeat',
# help='Number of seconds between heartbeats for distributed '
# 'coordination.'),
# cfg.FloatOpt('check_watchers',
# default=10.0,
# deprecated_for_removal=True,
# deprecated_reason='This parameter is no longer used.',
# help='Number of seconds between checks to see if group '
# 'membership has changed'),
# cfg.IntOpt('retry_backoff',
# default=1,
# help='Retry backoff factor when retrying to connect with'
# ' coordination backend'),
# cfg.IntOpt('max_retry_interval',
# default=30,
# help='Maximum number of seconds between retry to join '
# 'partitioning group')
# ]
# class ErrorJoiningPartitioningGroup(Exception):
# class MemberNotInGroupError(Exception):
# class HashRing(object):
# class PartitionCoordinator(object):
# def __init__(self):
# def __init__(self, group_id, members, my_id):
# def __init__(self, nodes, replicas=100):
# def _hash(key):
# def _get_position_on_ring(self, key):
# def get_node(self, key):
# def __init__(self, conf, my_id=None):
# def start(self):
# def stop(self):
# def is_active(self):
# def heartbeat(self):
# def join_group(self, group_id):
# def _inner():
# def leave_group(self, group_id):
# def _get_members(self, group_id):
# def extract_my_subset(self, group_id, universal_set):
#
# Path: aodh/service.py
# OPTS = [
# cfg.IntOpt('http_timeout',
# default=600,
# help='Timeout seconds for HTTP requests. Set it to None to '
# 'disable timeout.'),
# cfg.IntOpt('evaluation_interval',
# default=60,
# help='Period of evaluation cycle, should'
# ' be >= than configured pipeline interval for'
# ' collection of underlying meters.'),
# ]
# EVALUATOR_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for evaluator service. '
# 'default value is 1.')
# ]
# NOTIFIER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for notifier service. '
# 'default value is 1.')
# ]
# LISTENER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for listener service. '
# 'default value is 1.')
# ]
# def prepare_service(argv=None, config_files=None):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
. Output only the next line. | self.assertTrue(coord._coordinator.is_started) |
Here is a snippet: <|code_start|> group='coordination')
with mock.patch('tooz.coordination.get_coordinator',
lambda _, member_id:
coordinator_cls(member_id, shared_storage)):
pc = coordination.PartitionCoordinator(self.CONF, agent_id)
pc.start()
return pc
def _usage_simulation(self, *agents_kwargs):
partition_coordinators = []
for kwargs in agents_kwargs:
partition_coordinator = self._get_new_started_coordinator(
self.shared_storage, kwargs['agent_id'], kwargs.get(
'coordinator_cls'))
partition_coordinator.join_group(kwargs['group_id'])
partition_coordinators.append(partition_coordinator)
for i, kwargs in enumerate(agents_kwargs):
all_resources = kwargs.get('all_resources', [])
expected_resources = kwargs.get('expected_resources', [])
actual_resources = partition_coordinators[i].extract_my_subset(
kwargs['group_id'], all_resources)
self.assertEqual(expected_resources, actual_resources)
def test_single_group(self):
agents = [dict(agent_id='agent1', group_id='group'),
dict(agent_id='agent2', group_id='group')]
self._usage_simulation(*agents)
self.assertEqual(['group'], sorted(self.shared_storage.keys()))
<|code_end|>
. Write the next line using the current file imports:
from unittest import mock
from oslo_config import fixture as fixture_config
from aodh import coordination
from aodh import service
from aodh.tests import base
import tooz.coordination
and context from other files:
# Path: aodh/coordination.py
# LOG = log.getLogger(__name__)
# OPTS = [
# cfg.StrOpt('backend_url',
# help='The backend URL to use for distributed coordination. If '
# 'left empty, alarm evaluation won\'t do workload '
# 'partitioning and will only function correctly if a '
# 'single instance of the service is running.'),
# cfg.FloatOpt('heartbeat_interval',
# default=1.0,
# deprecated_name='heartbeat',
# help='Number of seconds between heartbeats for distributed '
# 'coordination.'),
# cfg.FloatOpt('check_watchers',
# default=10.0,
# deprecated_for_removal=True,
# deprecated_reason='This parameter is no longer used.',
# help='Number of seconds between checks to see if group '
# 'membership has changed'),
# cfg.IntOpt('retry_backoff',
# default=1,
# help='Retry backoff factor when retrying to connect with'
# ' coordination backend'),
# cfg.IntOpt('max_retry_interval',
# default=30,
# help='Maximum number of seconds between retry to join '
# 'partitioning group')
# ]
# class ErrorJoiningPartitioningGroup(Exception):
# class MemberNotInGroupError(Exception):
# class HashRing(object):
# class PartitionCoordinator(object):
# def __init__(self):
# def __init__(self, group_id, members, my_id):
# def __init__(self, nodes, replicas=100):
# def _hash(key):
# def _get_position_on_ring(self, key):
# def get_node(self, key):
# def __init__(self, conf, my_id=None):
# def start(self):
# def stop(self):
# def is_active(self):
# def heartbeat(self):
# def join_group(self, group_id):
# def _inner():
# def leave_group(self, group_id):
# def _get_members(self, group_id):
# def extract_my_subset(self, group_id, universal_set):
#
# Path: aodh/service.py
# OPTS = [
# cfg.IntOpt('http_timeout',
# default=600,
# help='Timeout seconds for HTTP requests. Set it to None to '
# 'disable timeout.'),
# cfg.IntOpt('evaluation_interval',
# default=60,
# help='Period of evaluation cycle, should'
# ' be >= than configured pipeline interval for'
# ' collection of underlying meters.'),
# ]
# EVALUATOR_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for evaluator service. '
# 'default value is 1.')
# ]
# NOTIFIER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for notifier service. '
# 'default value is 1.')
# ]
# LISTENER_OPTS = [
# cfg.IntOpt('workers',
# default=1,
# min=1,
# help='Number of workers for listener service. '
# 'default value is 1.')
# ]
# def prepare_service(argv=None, config_files=None):
#
# Path: aodh/tests/base.py
# class BaseTestCase(base.BaseTestCase):
# class SkipNotImplementedMeta(type):
# def setup_messaging(self, conf, exchange=None):
# def assertTimestampEqual(self, first, second, msg=None):
# def assertIsEmpty(self, obj):
# def assertIsNotEmpty(self, obj):
# def assertDictContains(self, parent, child):
# def path_get(project_file=None):
# def assert_single_item(self, items, **filters):
# def assert_multiple_items(self, items, count, **filters):
# def _matches(item, **props):
# def _skip_decorator(func):
# def skip_if_not_implemented(*args, **kwargs):
# def __new__(cls, name, bases, local):
, which may include functions, classes, or code. Output only the next line. | self.assertEqual(['agent1', 'agent2'], |
Here is a snippet: <|code_start|> # YAML is a superset of JSON so we should be able to load
# each line of the log file and pull the object out of with
# Because of the way syslog-ng logs JSON, we can't simply
# grab the entire thing because there isn't array markers in
# the file ..,
for log in args.logs:
log_upload = SyslogUploadMessage(ndr_config)
try:
with open(log, 'r') as f:
for line in f:
# It's also possible we can get a bad entry. In that case, skip it and report
# it into the log. It will get dumped into syslog_upload's syslog error log
                        # for autopsy
try:
yaml_line = yaml.safe_load(line)
entry = SyslogEntry.from_dict(yaml_line)
if entry == None: # Was a newline, discarding
continue
log_upload.add_entry(entry)
except ValueError:
logger.error("failed to parse %s in %s", line, log)
except:
logger.error("cataphoric error %s %s %s", log, line, sys.exc_info()[0])
# With everything loaded, queue the magic
log_upload.sign_report()
log_upload.load_into_queue()
<|code_end|>
. Write the next line using the current file imports:
import sys
import argparse
import yaml
import ndr
from ndr import SyslogUploadMessage, SyslogEntry
and context from other files:
# Path: ndr/syslog.py
# class SyslogEntry():
# def __init__(self):
# self.timestamp = None
# self.program = None
# self.priority = None
# self.pid = None
# self.message = None
# self.facility = None
# self.host = None
#
# @classmethod
# def from_dict(cls, syslog_dict):
# '''Attempts to convert a YAML dict tree into a SyslogEntry.
#
# Because messages can be malformed or otherwise bad, this returns a KeyError
# if the values in the dict are missing. A special case is made for Message
# being empty. This represents a newline in the syslog, and we simply return
# None and discard it'''
#
# message = SyslogEntry()
# message.program = syslog_dict['program']
# message.priority = SyslogPriorities(syslog_dict['priority'])
#
# # PID will not always be present, set to none if its MIA
# if "pid" in syslog_dict:
# message.pid = int(syslog_dict["pid"])
# else:
# message.pid = None
#
# # Message can be blank, representing a newline. Toss it.
# if "message" not in syslog_dict:
# return None
#
# message.message = syslog_dict['message']
# message.timestamp = syslog_dict['timestamp']
#
# message.facility = SyslogFacilities(syslog_dict['facility'])
# message.host = syslog_dict['host']
# return message
#
# def to_dict(self):
# '''Returns a YAML structure of the entry as per the standardized YAML specification
# used by NDR'''
#
# syslog_dict = {}
# syslog_dict['program'] = self.program
# syslog_dict['priority'] = self.priority.value
# if self.pid:
# syslog_dict['pid'] = self.pid
# syslog_dict['message'] = self.message
#
# syslog_dict['timestamp'] = self.timestamp
# syslog_dict['facility'] = self.facility.value
# syslog_dict['host'] = self.host
#
# return syslog_dict
#
# class SyslogUploadMessage(IngestMessage, Syslog):
# '''Uploads logs from syslog formatted in a JSON manner.
#
# This class depends on syslog-ng's output to be formatted in JSON-formatted with the following
# fields present
# - program
# - priority
# - pid
# - message
# - facility
# - date
#
# syslog-ng formats one JSON object per line, these must be deserialized on a line-by-line
# basis'''
#
# def __init__(self, config=None):
# IngestMessage.__init__(
# self, config, IngestMessageTypes.SYSLOG_UPLOAD)
# Syslog.__init__(self)
#
# def from_message(self, ingest_msg: IngestMessage):
# '''Converts an ingest message to a syslog message'''
# super().from_message(ingest_msg)
#
# # Now we need to deserialize the payload
# for log in self.headers['payload']:
# for log_entry in log['log']:
# self.syslog_entries.append(SyslogEntry.from_dict(log_entry))
# return self
#
# def create_report(self):
# syslog_dicts = []
# for log_entry in self.syslog_entries:
# syslog_dicts.append(log_entry.to_dict())
#
# self.add_header('payload', [{'log': syslog_dicts}])
# super().create_report()
, which may include functions, classes, or code. Output only the next line. | finally: |
Next line prediction: <|code_start|> # pop a message off for each log
# YAML is a superset of JSON so we should be able to load
# each line of the log file and pull the object out of with
# Because of the way syslog-ng logs JSON, we can't simply
# grab the entire thing because there isn't array markers in
# the file ..,
for log in args.logs:
log_upload = SyslogUploadMessage(ndr_config)
try:
with open(log, 'r') as f:
for line in f:
# It's also possible we can get a bad entry. In that case, skip it and report
# it into the log. It will get dumped into syslog_upload's syslog error log
                        # for autopsy
try:
yaml_line = yaml.safe_load(line)
entry = SyslogEntry.from_dict(yaml_line)
if entry == None: # Was a newline, discarding
continue
log_upload.add_entry(entry)
except ValueError:
logger.error("failed to parse %s in %s", line, log)
except:
logger.error("cataphoric error %s %s %s", log, line, sys.exc_info()[0])
# With everything loaded, queue the magic
log_upload.sign_report()
<|code_end|>
. Use current file imports:
(import sys
import argparse
import yaml
import ndr
from ndr import SyslogUploadMessage, SyslogEntry)
and context including class names, function names, or small code snippets from other files:
# Path: ndr/syslog.py
# class SyslogEntry():
# def __init__(self):
# self.timestamp = None
# self.program = None
# self.priority = None
# self.pid = None
# self.message = None
# self.facility = None
# self.host = None
#
# @classmethod
# def from_dict(cls, syslog_dict):
# '''Attempts to convert a YAML dict tree into a SyslogEntry.
#
# Because messages can be malformed or otherwise bad, this returns a KeyError
# if the values in the dict are missing. A special case is made for Message
# being empty. This represents a newline in the syslog, and we simply return
# None and discard it'''
#
# message = SyslogEntry()
# message.program = syslog_dict['program']
# message.priority = SyslogPriorities(syslog_dict['priority'])
#
# # PID will not always be present, set to none if its MIA
# if "pid" in syslog_dict:
# message.pid = int(syslog_dict["pid"])
# else:
# message.pid = None
#
# # Message can be blank, representing a newline. Toss it.
# if "message" not in syslog_dict:
# return None
#
# message.message = syslog_dict['message']
# message.timestamp = syslog_dict['timestamp']
#
# message.facility = SyslogFacilities(syslog_dict['facility'])
# message.host = syslog_dict['host']
# return message
#
# def to_dict(self):
# '''Returns a YAML structure of the entry as per the standardized YAML specification
# used by NDR'''
#
# syslog_dict = {}
# syslog_dict['program'] = self.program
# syslog_dict['priority'] = self.priority.value
# if self.pid:
# syslog_dict['pid'] = self.pid
# syslog_dict['message'] = self.message
#
# syslog_dict['timestamp'] = self.timestamp
# syslog_dict['facility'] = self.facility.value
# syslog_dict['host'] = self.host
#
# return syslog_dict
#
# class SyslogUploadMessage(IngestMessage, Syslog):
# '''Uploads logs from syslog formatted in a JSON manner.
#
# This class depends on syslog-ng's output to be formatted in JSON-formatted with the following
# fields present
# - program
# - priority
# - pid
# - message
# - facility
# - date
#
# syslog-ng formats one JSON object per line, these must be deserialized on a line-by-line
# basis'''
#
# def __init__(self, config=None):
# IngestMessage.__init__(
# self, config, IngestMessageTypes.SYSLOG_UPLOAD)
# Syslog.__init__(self)
#
# def from_message(self, ingest_msg: IngestMessage):
# '''Converts an ingest message to a syslog message'''
# super().from_message(ingest_msg)
#
# # Now we need to deserialize the payload
# for log in self.headers['payload']:
# for log_entry in log['log']:
# self.syslog_entries.append(SyslogEntry.from_dict(log_entry))
# return self
#
# def create_report(self):
# syslog_dicts = []
# for log_entry in self.syslog_entries:
# syslog_dicts.append(log_entry.to_dict())
#
# self.add_header('payload', [{'log': syslog_dicts}])
# super().create_report()
. Output only the next line. | log_upload.load_into_queue() |
Given snippet: <|code_start|>
<table>
<elem key="value">CA:FALSE</elem>
<elem key="critical">true</elem>
<elem key="name">X509v3 Basic Constraints</elem>
</table>
<table>
<elem key="value">12:AA:04:F6:4F:A8:01:F4:2B:CF:A9:DE:88:D1:93:8C:37:F7:AD:3E</elem>
<elem key="name">X509v3 Subject Key Identifier</elem>
</table>
    Not easy to work with at all. At some point, we'll probably end up writing NSE probes that
can convert said information into something more useful using the script_id field as a keying
value and hoping for !collisions. Won't happen today though'''
def __init__(self, script_name, output):
self.script_name = script_name
self.output = output
self.elements = []
def to_dict(self):
script_output_dict = {}
script_output_dict['script_name'] = self.script_name
script_output_dict['output'] = self.output
if self.elements is not None and len(self.elements) != 0:
script_output_dict['elements'] = self.elements
return script_output_dict
@classmethod
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import ipaddress
import xml.etree.ElementTree as ET
import yaml
import ndr
import json
from enum import Enum
from ndr.utils import (set_dict_if_not_none,
set_value_if_dict_exists,
return_value_if_key_exists)
and context:
# Path: ndr/utils.py
# def set_dict_if_not_none(the_dict, key, value):
# '''Wrapper function for easing serialization pain'''
# if value is None:
# return
#
# the_dict[key] = value
#
# def set_value_if_dict_exists(the_dict, key):
# '''Wrapper function for deserialization code'''
# if key not in the_dict:
# return
#
# return the_dict[key]
#
# def return_value_if_key_exists(the_dict, key):
# '''Wrapper function for helping deserialization'''
# if key in the_dict:
# return the_dict[key]
#
# return None
which might include code, classes, or functions. Output only the next line. | def from_dict(cls, script_output_dict): |
Predict the next line for this snippet: <|code_start|> else:
return found_hosts
def find_by_ip(self, ip_str):
'''Takes an IPv4 or IPv6 address and tries to match that to a host
Returns NmapHost if found, else None
'''
ip_obj = ipaddress.ip_address(ip_str)
for host in self.hosts:
if host.addr == ip_obj:
return host
return None
def full_ip_and_mac_list(self):
'''Returns a list of all IP/MACs addresses found in this scan'''
full_ip_list = []
for host in self.hosts:
full_ip_list.append((host.addr, host.mac_address))
return full_ip_list
def mac_to_ip_dict(self):
'''Returns a dict of all MAC addresses seen within the scan and the addresses attached to
those MACs. Used by NetworkDelta detection'''
mac_to_ip = {}
<|code_end|>
with the help of current file imports:
import ipaddress
import xml.etree.ElementTree as ET
import yaml
import ndr
import json
from enum import Enum
from ndr.utils import (set_dict_if_not_none,
set_value_if_dict_exists,
return_value_if_key_exists)
and context from other files:
# Path: ndr/utils.py
# def set_dict_if_not_none(the_dict, key, value):
# '''Wrapper function for easing serialization pain'''
# if value is None:
# return
#
# the_dict[key] = value
#
# def set_value_if_dict_exists(the_dict, key):
# '''Wrapper function for deserialization code'''
# if key not in the_dict:
# return
#
# return the_dict[key]
#
# def return_value_if_key_exists(the_dict, key):
# '''Wrapper function for helping deserialization'''
# if key in the_dict:
# return the_dict[key]
#
# return None
, which may contain function names, class names, or code. Output only the next line. | for host in self.hosts: |
Given the following code snippet before the placeholder: <|code_start|> Returns NmapHost if found, else None
'''
ip_obj = ipaddress.ip_address(ip_str)
for host in self.hosts:
if host.addr == ip_obj:
return host
return None
def full_ip_and_mac_list(self):
'''Returns a list of all IP/MACs addresses found in this scan'''
full_ip_list = []
for host in self.hosts:
full_ip_list.append((host.addr, host.mac_address))
return full_ip_list
def mac_to_ip_dict(self):
'''Returns a dict of all MAC addresses seen within the scan and the addresses attached to
those MACs. Used by NetworkDelta detection'''
mac_to_ip = {}
for host in self.hosts:
if host.mac_address is None:
continue
mac = host.mac_address
<|code_end|>
, predict the next line using imports from the current file:
import ipaddress
import xml.etree.ElementTree as ET
import yaml
import ndr
import json
from enum import Enum
from ndr.utils import (set_dict_if_not_none,
set_value_if_dict_exists,
return_value_if_key_exists)
and context including class names, function names, and sometimes code from other files:
# Path: ndr/utils.py
# def set_dict_if_not_none(the_dict, key, value):
# '''Wrapper function for easing serialization pain'''
# if value is None:
# return
#
# the_dict[key] = value
#
# def set_value_if_dict_exists(the_dict, key):
# '''Wrapper function for deserialization code'''
# if key not in the_dict:
# return
#
# return the_dict[key]
#
# def return_value_if_key_exists(the_dict, key):
# '''Wrapper function for helping deserialization'''
# if key in the_dict:
# return the_dict[key]
#
# return None
. Output only the next line. | if mac not in mac_to_ip: |
Given the following code snippet before the placeholder: <|code_start|>#!/usr/bin/python3
# This file is part of NDR.
#
# Copyright (C) 2017 - Secured By THEM
# Original Author: Michael Casadevall <mcasadevall@them.com>
#
# NDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NDR. If not, see <http://www.gnu.org/licenses/>.
# Testing data from a live system running syslog-ng in JSON reporting mode
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_SYSLOG_DATA = THIS_DIR + '/data/test_log.json'
NDR_CONFIG = ndr.Config(THIS_DIR + '/data/test_config.yml')
<|code_end|>
, predict the next line using imports from the current file:
import unittest
import os
import ndr
import yaml
from ndr import SyslogUploadMessage, SyslogEntry
and context including class names, function names, and sometimes code from other files:
# Path: ndr/syslog.py
# class SyslogEntry():
# def __init__(self):
# self.timestamp = None
# self.program = None
# self.priority = None
# self.pid = None
# self.message = None
# self.facility = None
# self.host = None
#
# @classmethod
# def from_dict(cls, syslog_dict):
# '''Attempts to convert a YAML dict tree into a SyslogEntry.
#
# Because messages can be malformed or otherwise bad, this returns a KeyError
# if the values in the dict are missing. A special case is made for Message
# being empty. This represents a newline in the syslog, and we simply return
# None and discard it'''
#
# message = SyslogEntry()
# message.program = syslog_dict['program']
# message.priority = SyslogPriorities(syslog_dict['priority'])
#
# # PID will not always be present, set to none if its MIA
# if "pid" in syslog_dict:
# message.pid = int(syslog_dict["pid"])
# else:
# message.pid = None
#
# # Message can be blank, representing a newline. Toss it.
# if "message" not in syslog_dict:
# return None
#
# message.message = syslog_dict['message']
# message.timestamp = syslog_dict['timestamp']
#
# message.facility = SyslogFacilities(syslog_dict['facility'])
# message.host = syslog_dict['host']
# return message
#
# def to_dict(self):
# '''Returns a YAML structure of the entry as per the standardized YAML specification
# used by NDR'''
#
# syslog_dict = {}
# syslog_dict['program'] = self.program
# syslog_dict['priority'] = self.priority.value
# if self.pid:
# syslog_dict['pid'] = self.pid
# syslog_dict['message'] = self.message
#
# syslog_dict['timestamp'] = self.timestamp
# syslog_dict['facility'] = self.facility.value
# syslog_dict['host'] = self.host
#
# return syslog_dict
#
# class SyslogUploadMessage(IngestMessage, Syslog):
# '''Uploads logs from syslog formatted in a JSON manner.
#
# This class depends on syslog-ng's output to be formatted in JSON-formatted with the following
# fields present
# - program
# - priority
# - pid
# - message
# - facility
# - date
#
# syslog-ng formats one JSON object per line, these must be deserialized on a line-by-line
# basis'''
#
# def __init__(self, config=None):
# IngestMessage.__init__(
# self, config, IngestMessageTypes.SYSLOG_UPLOAD)
# Syslog.__init__(self)
#
# def from_message(self, ingest_msg: IngestMessage):
# '''Converts an ingest message to a syslog message'''
# super().from_message(ingest_msg)
#
# # Now we need to deserialize the payload
# for log in self.headers['payload']:
# for log_entry in log['log']:
# self.syslog_entries.append(SyslogEntry.from_dict(log_entry))
# return self
#
# def create_report(self):
# syslog_dicts = []
# for log_entry in self.syslog_entries:
# syslog_dicts.append(log_entry.to_dict())
#
# self.add_header('payload', [{'log': syslog_dicts}])
# super().create_report()
. Output only the next line. | class SyslogTest(unittest.TestCase): |
Based on the snippet: <|code_start|> self.assertTrue(np.allclose(hpscres, 0.))
print('\nHeun-Step:')
heunrhs = M * iniv - .5 * dt * \
(A * iniv + iniconvvec + hpconvvec) + dt * fv
matvp = M * cnhev + .5 * dt * A * cnhev - dt * JT * cnhep
hcscres = np.linalg.norm(matvp - heunrhs)
print('Scipy residual: ', hcscres)
# import ipdb; ipdb.set_trace()
curv, curp = dts.expand_vp_dolfunc(vc=cnhevwbcs, pc=cnhep, **femp)
heunres = heunres(curv, curp, dt, lastvel=inivelfun, othervel=hpvelfun)
hcfnres = np.linalg.norm(heunres.get_local()[invinds])
print('dolfin residua: ', hcfnres)
self.assertTrue(np.allclose(hcfnres, 0.))
self.assertTrue(np.allclose(hcscres, 0.))
print('\nAB2-Step:')
abtrhs = M * cnhev - .5 * dt * \
(A * cnhev + -iniconvvec + 3. * hcconvvec) + dt * fv
matvp = M * cnabv + .5 * dt * A * cnabv - dt * JT * cnabp
abscres = np.linalg.norm(matvp - abtrhs)
print('Scipy residual: ', abscres)
# import ipdb; ipdb.set_trace()
curv, curp = dts.expand_vp_dolfunc(vc=cnabvwbcs, pc=cnabp, **femp)
crnires = crnires(curv, curp, dt, lastvel=hcvelfun, othervel=inivelfun)
abfnres = np.linalg.norm(crnires.get_local()[invinds])
print('dolfin residua: ', abfnres)
<|code_end|>
, predict the immediate next line with the help of imports:
import unittest
import numpy as np
import dolfin_navier_scipy.stokes_navier_utils as snu
import dolfin_navier_scipy.problem_setups as dnsps
import dolfin_navier_scipy.dolfin_to_sparrays as dts
from dolfin_navier_scipy.residual_checks import get_imex_res
and context (classes, functions, sometimes code) from other files:
# Path: dolfin_navier_scipy/residual_checks.py
# def get_imex_res(V=None, outflowds=None, gradvsymmtrc=True, nu=None,
# implscheme='crni', explscheme='abtw'):
# """ define the residual for an IMEX/AB2 time discretization
#
# """
# if not implscheme == 'crni':
# raise NotImplementedError()
#
# if explscheme == 'abtw':
# def convform(cvo=None, cvt=None, phi=None):
# return (1.5*inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx -
# .5*inner(dolfin.dot(cvt, nabla_grad(cvt)), phi)*dx)
#
# elif explscheme == 'heun':
# def convform(cvo=None, cvt=None, phi=None):
# return (.5*inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx +
# .5*inner(dolfin.dot(cvt, nabla_grad(cvt)), phi)*dx)
#
# elif explscheme == 'eule':
# def convform(cvo=None, cvt=None, phi=None):
# return inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx
#
# def imex_res(vel, pres, dt, lastvel=None, othervel=None, phi=None):
# if phi is None:
# phi = dolfin.TestFunction(V)
#
# diffvel = .5*(vel+lastvel) # Crank-Nicolson
# diffrm = nu*inner(grad(diffvel)+grad(diffvel).T, grad(phi))*dx
# if gradvsymmtrc:
# nvec = dolfin.FacetNormal(V.mesh())
# diffrm = diffrm - (nu*inner(grad(diffvel).T*nvec, phi))*outflowds
# cnvfrm = convform(cvo=lastvel, cvt=othervel, phi=phi)
#
# pfrm = -inner(pres, div(phi))*dx
# dtprt = 1./dt*dolfin.assemble(inner(vel, phi)*dx) \
# - 1./dt*dolfin.assemble(inner(lastvel, phi)*dx)
# res = dolfin.assemble(diffrm+cnvfrm+pfrm) + dtprt
#
# # import numpy as np
# # nfc_c = dolfin.assemble(cnvfrm).get_local()
# # print(np.linalg.norm(nfc_c), nfc_c[0], nfc_c.size)
# # print('debggng')
# return res
#
# return imex_res
. Output only the next line. | self.assertTrue(np.allclose(abfnres, 0.)) |
Here is a snippet: <|code_start|> cdclfac = 2./(rho*L*Um**2)
print('Computed via testing the residual: ')
print('Cl: {0}'.format(cdclfac*lift))
print('Cd: {0}'.format(cdclfac*drag))
phionevec = np.zeros((femp['V'].dim(), 1))
phionevec[femp['ldsbcinds'], :] = 1.
phione = dolfin.Function(femp['V'])
phione.vector().set_local(phionevec)
# phionex = phione.sub(0)
print('Computed via `dnsps.LiftDragSurfForce`:')
realpss = rho*dynpss # Um**2*rho*dynpss
realvss = vss # Um*vss
getld = dnsps.LiftDragSurfForce(V=femp['V'], nu=nu,
ldds=femp['liftdragds'],
outflowds=femp['outflowds'],
phione=phione)
clift, cdrag = getld.evaliftdragforce(u=realvss, p=realpss)
print('Cl: {0}'.format(cdclfac*clift))
print('Cd: {0}'.format(cdclfac*cdrag))
a_1 = dolfin.Point(0.15, 0.2)
a_2 = dolfin.Point(0.25, 0.2)
pdiff = realpss(a_1) - realpss(a_2)
print('Delta P: {0}'.format(pdiff))
print('\n values from Schaefer/Turek as in')
print('www.featflow.de/en/benchmarks/cfdbenchmarking/flow/' +
<|code_end|>
. Write the next line using the current file imports:
import numpy as np
import dolfin
import dolfin_navier_scipy.stokes_navier_utils as snu
import dolfin_navier_scipy.dolfin_to_sparrays as dts
import dolfin_navier_scipy.problem_setups as dnsps
from dolfin_navier_scipy.residual_checks import get_steady_state_res
and context from other files:
# Path: dolfin_navier_scipy/residual_checks.py
# def get_steady_state_res(V=None, outflowds=None, gradvsymmtrc=True, nu=None):
#
# def steady_state_res(vel, pres, phi=None):
# if phi is None:
# phi = dolfin.TestFunction(V)
#
# cnvfrm = inner(dolfin.dot(vel, nabla_grad(vel)), phi)*dx
# diffrm = nu*inner(grad(vel)+grad(vel).T, grad(phi))*dx
# if gradvsymmtrc:
# nvec = dolfin.FacetNormal(V.mesh())
# diffrm = diffrm - (nu*inner(grad(vel).T*nvec, phi))*outflowds
#
# pfrm = inner(pres, div(phi))*dx
# res = dolfin.assemble(diffrm+cnvfrm-pfrm)
# return res
#
# return steady_state_res
, which may include functions, classes, or code. Output only the next line. | 'dfg_benchmark1_re20.html:') |
Predict the next line after this snippet: <|code_start|>
# from dolfin_navier_scipy.residual_checks import get_steady_state_res
def twod_simu(nu=None, charvel=None, rho=1., rhosolid=10., meshparams=None,
inirot=None, inivfun=None,
t0=0.0, tE=.1, Nts=1e2+1,
start_steadystate=False, ininu=None,
plotplease=False, proutdir='paraviewplots/',
return_final_vp=False, ParaviewOutput=False, scheme='TH'):
femp, stokesmatsc, rhsd = \
dnsps.get_sysmats(problem='gen_bccont', nu=nu, bccontrol=False,
charvel=charvel, scheme=scheme, mergerhs=True,
meshparams=meshparams)
# dnsps.get_sysmats(problem='cylinder_rot', nu=nu, bccontrol=False,
# charvel=charvel, scheme=scheme, mergerhs=True,
# meshparams=meshparams)
tips = dict(t0=t0, tE=tE, Nts=Nts)
NP, NV = stokesmatsc['J'].shape
print('NV + NP : {0} + {1} = {2}'.format(NV, NP, NV+NP))
# function of ones at the lift/drag boundary
<|code_end|>
using the current file's imports:
import numpy as np
import matplotlib.pyplot as plt
import json
import dolfin
import dolfin_navier_scipy.stokes_navier_utils as snu
import dolfin_navier_scipy.dolfin_to_sparrays as dts
import dolfin_navier_scipy.problem_setups as dnsps
from dolfin_navier_scipy.residual_checks import get_imex_res
and any relevant context from other files:
# Path: dolfin_navier_scipy/residual_checks.py
# def get_imex_res(V=None, outflowds=None, gradvsymmtrc=True, nu=None,
# implscheme='crni', explscheme='abtw'):
# """ define the residual for an IMEX/AB2 time discretization
#
# """
# if not implscheme == 'crni':
# raise NotImplementedError()
#
# if explscheme == 'abtw':
# def convform(cvo=None, cvt=None, phi=None):
# return (1.5*inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx -
# .5*inner(dolfin.dot(cvt, nabla_grad(cvt)), phi)*dx)
#
# elif explscheme == 'heun':
# def convform(cvo=None, cvt=None, phi=None):
# return (.5*inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx +
# .5*inner(dolfin.dot(cvt, nabla_grad(cvt)), phi)*dx)
#
# elif explscheme == 'eule':
# def convform(cvo=None, cvt=None, phi=None):
# return inner(dolfin.dot(cvo, nabla_grad(cvo)), phi)*dx
#
# def imex_res(vel, pres, dt, lastvel=None, othervel=None, phi=None):
# if phi is None:
# phi = dolfin.TestFunction(V)
#
# diffvel = .5*(vel+lastvel) # Crank-Nicolson
# diffrm = nu*inner(grad(diffvel)+grad(diffvel).T, grad(phi))*dx
# if gradvsymmtrc:
# nvec = dolfin.FacetNormal(V.mesh())
# diffrm = diffrm - (nu*inner(grad(diffvel).T*nvec, phi))*outflowds
# cnvfrm = convform(cvo=lastvel, cvt=othervel, phi=phi)
#
# pfrm = -inner(pres, div(phi))*dx
# dtprt = 1./dt*dolfin.assemble(inner(vel, phi)*dx) \
# - 1./dt*dolfin.assemble(inner(lastvel, phi)*dx)
# res = dolfin.assemble(diffrm+cnvfrm+pfrm) + dtprt
#
# # import numpy as np
# # nfc_c = dolfin.assemble(cnvfrm).get_local()
# # print(np.linalg.norm(nfc_c), nfc_c[0], nfc_c.size)
# # print('debggng')
# return res
#
# return imex_res
. Output only the next line. | phionevec = np.zeros((femp['V'].dim(), 1)) |
Predict the next line after this snippet: <|code_start|> data_prfx = problem + '{2}_mesh{0}_Re{1}'.\
format(meshlvl, femp['Re'], scheme)
soldict.update(fv=rhsd['fv'], fp=rhsd['fp'],
N=meshlvl, nu=nu,
verbose=True,
vel_pcrd_stps=0,
vel_nwtn_tol=1e-10,
vel_nwtn_stps=10,
return_vp=True,
get_datastring=None,
dbcinds=femp['dbcinds'], dbcvals=femp['dbcvals'],
data_prfx=ddir+data_prfx,
paraviewoutput=ParaviewOutput,
vfileprfx=proutdir+'vel_',
pfileprfx=proutdir+'p_')
L = femp['charlen'] # characteristic length
phionevec = np.zeros((femp['V'].dim(), 1))
phionevec[femp['mvwbcinds'], :] = 1.
phione = dolfin.Function(femp['V'])
phione.vector().set_local(phionevec)
pickx = dolfin.as_matrix([[1., 0.], [0., 0.]])
picky = dolfin.as_matrix([[0., 0.], [0., 1.]])
pox = pickx*phione
poy = picky*phione
phitwovec = np.zeros((femp['V'].dim(), 1))
phitwovec[femp['mvwbcinds'], 0] = femp['mvwbcvals']
phitwo = dolfin.Function(femp['V'])
phitwo.vector().set_local(phitwovec)
<|code_end|>
using the current file's imports:
import numpy as np
import scipy.optimize as sco
import dolfin
import dolfin_navier_scipy.stokes_navier_utils as snu
import dolfin_navier_scipy.dolfin_to_sparrays as dts
import dolfin_navier_scipy.problem_setups as dnsps
import dolfin_navier_scipy.data_output_utils as dou
from dolfin_navier_scipy.residual_checks import get_steady_state_res
and any relevant context from other files:
# Path: dolfin_navier_scipy/residual_checks.py
# def get_steady_state_res(V=None, outflowds=None, gradvsymmtrc=True, nu=None):
#
# def steady_state_res(vel, pres, phi=None):
# if phi is None:
# phi = dolfin.TestFunction(V)
#
# cnvfrm = inner(dolfin.dot(vel, nabla_grad(vel)), phi)*dx
# diffrm = nu*inner(grad(vel)+grad(vel).T, grad(phi))*dx
# if gradvsymmtrc:
# nvec = dolfin.FacetNormal(V.mesh())
# diffrm = diffrm - (nu*inner(grad(vel).T*nvec, phi))*outflowds
#
# pfrm = inner(pres, div(phi))*dx
# res = dolfin.assemble(diffrm+cnvfrm-pfrm)
# return res
#
# return steady_state_res
. Output only the next line. | if ParaviewOutput: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.