text stringlengths 0 1.05M | meta dict |
|---|---|
from frasco import Feature, action, OptionMissingError, AttrDict, current_app
from PIL import Image
import os
class ImagesFeature(Feature):
    """Frasco feature exposing PIL-based image manipulation actions
    (read, resize, thumbnail, crop, rotate, transpose, watermark)."""
    name = "images"
    # search_dir/dest_dir default to the forms upload dir (or the app's
    # static folder) in init_app when left as None.
    defaults = {"search_dir": None,
                "dest_dir": None,
                "output_format": None}
    def init_app(self, app):
        """Resolve the default search/dest directories once the app is known."""
        default_dir = None
        if "forms" in app.features:
            # Prefer the forms feature's upload directory when available.
            default_dir = app.features.forms.options["upload_dir"]
        else:
            default_dir = app.static_folder
        if not self.options["search_dir"]:
            self.options["search_dir"] = default_dir
        if not self.options["dest_dir"]:
            self.options["dest_dir"] = default_dir

    def get_path(self, filename):
        """Return an openable path for filename.

        Absolute paths pass through unchanged; otherwise the file is looked
        up under search_dir, falling back to the raw filename when absent.
        """
        if os.path.isabs(filename):
            return filename
        sdir = self.options["search_dir"]
        if sdir:
            f = os.path.join(sdir, filename)
            if os.path.exists(f):
                return f
        return filename
    def get_dest_filename(self, path, opts, default="{path}{prefix}{name}{suffix}{ext}", **kwargs):
        """Compute the output file location for a processed image.

        The "dest" option is a format string over the placeholders path /
        name / filename / ext / prefix / suffix (overridable via kwargs and
        the ext/prefix/suffix opts). Returns a (full pathname, relative
        filename) pair; relative results are joined onto dest_dir.
        """
        dest = opts.get("dest", default)
        dest_dir = opts.get("dest_dir", self.options["dest_dir"])
        if "{" in dest:
            filename, ext = os.path.splitext(path)
            data = dict(path=os.path.dirname(filename), name=os.path.basename(filename),
                filename=filename, ext=ext, prefix="", suffix="")
            data.update(kwargs)
            data.update(dict((k, opts[k]) for k in ("ext", "prefix", "suffix") if k in opts))
            dest = dest.format(**data)
        if not os.path.isabs(dest) and dest_dir:
            return os.path.join(dest_dir, dest), dest
        return dest, dest

    def save_img(self, path, opts, img, **kwargs):
        """Save img to the destination derived from path/opts and return the
        relative filename."""
        pathname, filename = self.get_dest_filename(path, opts, **kwargs)
        img.save(pathname, opts.get("format", self.options["output_format"]))
        return filename
    def get_size(self, opts, lprefix="", sprefix="", ratio=None):
        """Extract a (width, height) pair from the options.

        Accepts width/height (shorthand w/h) or a combined "size" option
        formatted as "WxH"; prefixes allow the max_*/m* variants. When ratio
        (a reference (w, h)) is given, a single missing dimension is derived
        from it proportionally.

        Raises:
            OptionMissingError: when the size is under-specified.
        """
        w = opts.get("%swidth" % lprefix, opts.get("%sw" % sprefix))
        h = opts.get("%sheight" % lprefix, opts.get("%sh" % sprefix))
        if ("%ssize" % lprefix) in opts:
            # "size" wins over individual width/height options.
            w, h = map(int, opts["%ssize" % lprefix].split("x", 1))
        if ((w is None or h is None) and not ratio) or (w is None and h is None):
            raise OptionMissingError("Missing size options for image manipulation")
        if w is None:
            r = float(h) / float(ratio[1])
            w = int(ratio[0] * r)
        elif h is None:
            r = float(w) / float(ratio[0])
            h = int(ratio[1] * r)
        return w, h
@action(default_option="path", as_="image")
def read_image(self, path):
img = Image(self.get_path(path))
return AttrDict(format=img.format, size=img.size, mode=img.mode)
@action("resize_image")
def resize(self, path, resample=Image.ANTIALIAS, **opts):
path = self.get_path(path)
img = Image.open(path)
keep_ratio = False
try:
size = self.get_size(opts)
except OptionMissingError:
size = self.get_size(opts, "max_", "m", ratio=img.size)
keep_ratio = True
if keep_ratio:
img.thumbnail(size, resample)
else:
img = img.resize(size, resample)
return self.save_img(path, opts, img, suffix="-%sx%s" % size)
@action("create_image_thumbnail", default_option="path")
def create_thumbnail(self, path, resample=Image.ANTIALIAS, **opts):
img = Image.open(self.get_path(path))
fixed_size = False
try:
size = self.get_size(opts)
fixed_size = True
except OptionMissingError:
size = self.get_size(opts, "max_", "m", ratio=img.size)
if fixed_size and size[0] < img.size[0] and size[1] < img.size[1]:
r = max(float(size[0]) / float(img.size[0]), float(size[1]) / float(img.size[1]))
isize = (int(img.size[0] * r), int(img.size[1] * r))
img = img.resize(isize, resample)
x = max((isize[0] - size[0]) / 2, 0)
y = max((isize[1] - size[1]) / 2, 0)
img = img.crop((x, y, size[0], size[1]))
else:
img.thumbnail(size, resample)
return self.save_img(path, opts, img, suffix="-thumb-%sx%s" % size)
@action("crop_image")
def crop(self, path, **opts):
box = opts.get("box")
if not box:
w = opts.get("width", opts.get("w"))
h = opts.get("height", opts.get("h"))
box = (opts.get("x", 0), opts.get("y", 0), w, h)
path = self.get_path(path)
img = Image.open(path)
img = img.crop(box)
return self.save_img(path, opts, img, suffix="-cropped")
@action("rotate_image")
def rotate(self, path, angle, resample=0, expand=0, **opts):
path = self.get_path(path)
img = Image(path)
img = img.rotate(float(angle), resample, expand)
return self.save_img(path, opts, img, suffix="-rotated")
@action("transpose_image")
def transpose(self, path, method, **opts):
mapping = {"flip_left_right": Image.FLIP_LEFT_RIGHT,
"flip_top_bottom": Image.FLIP_TOP_BOTTOM,
"rotate90": Image.ROTATE_90,
"rotate180": Image.ROTATE_180,
"rotate270": Image.ROTATE_270}
path = self.get_path(path)
img = Image.open(path)
img = img.transpose(mapping[method])
return self.save_img(path, opts, img, suffix="-" + method)
@action("add_image_watermark")
def add_watermark(self, path, watermark, **opts):
path = self.get_path(path)
img = Image.open(path)
wtmk = Image.open(watermark)
iw, ih = img.size
ww, wh = wtmk.size
pos = (opts.get("x", iw - ww), opts.get("y", ih - wh))
img.paste(wtmk, pos)
return self.save_img(path, opts, img, suffix="-watermark") | {
"repo_name": "frascoweb/frasco-images",
"path": "frasco_images.py",
"copies": "1",
"size": "5990",
"license": "mit",
"hash": -840452578093654100,
"line_mean": 37.6516129032,
"line_max": 99,
"alpha_frac": 0.5507512521,
"autogenerated": false,
"ratio": 3.4644303065355695,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9512544011867559,
"avg_score": 0.0005275093536023045,
"num_lines": 155
} |
from frasco import (Feature, action, request, signal, current_app, redirect, flash,\
url_for, cached_property, current_context, hook, Blueprint,\
lazy_translate, command, current_context)
from frasco.utils import unknown_value
from frasco_models import as_transaction, save_model
import stripe
import datetime
import time
import os
import json
bp = Blueprint('stripe', __name__)
@bp.route('/stripe-webhook', methods=['POST'])
def webhook():
    """Receive a Stripe webhook and re-emit it as a frasco signal named
    stripe_<event_type>, with dots replaced by underscores."""
    feature_options = current_app.features.stripe.options
    if feature_options['webhook_validate_event']:
        # Re-fetch the event from Stripe so a forged payload is rejected.
        event = stripe.Event.retrieve(request.json['id'])
    else:
        event = stripe.Event.construct_from(request.json,
            feature_options['api_key'])
    signal_name = 'stripe_%s' % event.type.replace(".", "_")
    signal(signal_name).send(stripe, stripe_event=event)
    return 'ok'
class StripeFeature(Feature):
    """Frasco feature integrating Stripe billing: customers, payment
    sources, subscriptions, webhooks and local invoice mirroring."""
    name = "stripe"
    blueprints = [bp]
    defaults = {"default_currency": None,
                "enable_subscriptions": True,
                "default_plan": None,
                "auto_create_customer": True,
                "must_have_plan": False,
                "add_source_view": None,
                "no_payment_redirect_to": None,
                "no_payment_message": None,
                "subscription_past_due_message": lazy_translate(
                    u"We attempted to charge your credit card for your subscription but it failed."
                    "Please check your credit card details"),
                "debug_trial_period": None,
                "send_trial_will_end_email": True,
                "send_failed_invoice_email": True,
                "create_charge_invoice": True,
                "invoice_ref_kwargs": {},
                "webhook_validate_event": False,
                "model": None,
                "email_attribute": "email",
                "billing_fields": True,
                "reset_billing_fields": True,
                "default_subscription_tax_percent": None,
                "eu_vat_support": None,
                "eu_auto_vat_country": True,
                "eu_vat_use_address_country": False,
                "eu_auto_vat_rate": True,
                "auto_assets": True}
    # Signals emitted when the local model's Stripe-derived state changes.
    model_source_updated_signal = signal('stripe_model_source_updated')
    model_subscription_updated_signal = signal('stripe_model_subscription_updated')
    model_last_charge_updated_signal = signal('stripe_model_last_charge_updated')
    invoice_payment_signal = signal('stripe_invoice_payment')
    def init_app(self, app):
        """Configure the stripe client, register retrieve/create actions for
        the main Stripe resources, wire email/asset integration and extend
        the configured model with billing columns and webhook handlers."""
        stripe.api_key = self.options['api_key']
        self.api = stripe
        stripe_creatable_attributes = ('Customer', 'Plan', 'Coupon', 'Invoice',
            'InvoiceItem', 'Transfer', 'Recipient')
        stripe_attributes = stripe_creatable_attributes + \
            ('ApplicationFee', 'Account', 'Balance', 'Event', 'Token')
        # Expose each resource class on the feature and register a
        # "stripe_retrieve_*" (and, where creatable, "stripe_create_*") action.
        for attr in stripe_attributes:
            setattr(self, attr, getattr(stripe, attr))
            app.actions.register(action("stripe_retrieve_" + attr.lower(), as_=attr.lower())(getattr(stripe, attr).retrieve))
        for attr in stripe_creatable_attributes:
            app.actions.register(action("stripe_create_" + attr.lower(), as_=attr.lower())(getattr(stripe, attr).create))
        if app.features.exists("emails"):
            app.features.emails.add_templates_from_package(__name__)
        if app.features.exists('assets'):
            # NOTE(review): app.assets vs app.features.assets — presumably
            # both reach the assets feature; confirm against frasco-assets.
            app.assets.register({'stripejs': ['https://js.stripe.com/v2/#.js']})
            if self.options['auto_assets']:
                app.features.assets.add_default("@stripejs")
        if 'publishable_key' in self.options:
            app.config['EXPORTED_JS_VARS']['STRIPE_PUBLISHABLE_KEY'] = self.options['publishable_key']
        # Default the billing model to the users model when available.
        self.model_is_user = False
        if self.options["model"] is None and app.features.exists('users'):
            self.options["model"] = app.features.users.model
            self.options["email_attribute"] = app.features.users.options['email_column']
            self.model_is_user = True
        self.model = None
        if self.options["model"]:
            self.model = app.features.models.ensure_model(self.options["model"],
                stripe_customer_id=dict(type=str, index=True),
                has_stripe_source=dict(type=bool, default=False, index=True))
            self.model.stripe_customer = cached_property(self.find_model_customer)
            signal('stripe_customer_source_updated').connect(self.on_source_event)
            signal('stripe_customer_source_deleted').connect(self.on_source_event)
            signal('stripe_invoice_payment_succeeded').connect(self.on_invoice_payment)
            signal('stripe_invoice_payment_failed').connect(self.on_invoice_payment)
            if self.options['enable_subscriptions']:
                # Columns tracking the current plan and its charge history.
                app.features.models.ensure_model(self.options["model"],
                    stripe_subscription_id=dict(type=str, index=True),
                    plan_name=dict(type=str, index=True),
                    plan_status=dict(type=str, default='trialing', index=True),
                    plan_last_charged_at=datetime.datetime,
                    plan_last_charge_amount=float,
                    plan_last_charge_successful=dict(type=bool, default=True, index=True),
                    plan_next_charge_at=dict(type=datetime.datetime, index=True))
                self.model.stripe_subscription = cached_property(self.find_model_subscription)
                self.model.stripe_default_source = cached_property(self.find_model_default_source)
                signal('stripe_customer_subscription_updated').connect(self.on_subscription_event)
                signal('stripe_customer_subscription_deleted').connect(self.on_subscription_event)
                signal('stripe_customer_subscription_trial_will_end').connect(self.on_trial_will_end)
            if self.options['billing_fields']:
                # Local snapshot of the default card/source used for invoicing.
                app.features.models.ensure_model(self.model,
                    billing_name=str,
                    billing_address_line1=str,
                    billing_address_line2=str,
                    billing_address_city=str,
                    billing_address_state=str,
                    billing_address_zip=str,
                    billing_address_country=str,
                    billing_country=str,
                    billing_ip_address=str,
                    billing_brand=str,
                    billing_exp_month=str,
                    billing_exp_year=str,
                    billing_last4=str)
        if not self.model_is_user:
            self.model_is_user = self.model and app.features.exists('users') and self.model is app.features.users.model
        if self.options['eu_vat_support'] is None:
            # Auto-enable EU VAT handling when the eu_vat feature is loaded.
            self.options['eu_vat_support'] = app.features.exists('eu_vat')
        if self.options['eu_vat_support']:
            app.features.eu_vat.model_rate_updated_signal.connect(self.on_model_eu_vat_rate_update)
    def find_model_by_customer_id(self, cust_id):
        """Return the first model with this stripe_customer_id, or None."""
        return current_app.features.models.query(self.model).filter(stripe_customer_id=cust_id).first()

    def find_model_by_subscription_id(self, subscription_id):
        """Return the first model with this stripe_subscription_id, or None."""
        return current_app.features.models.query(self.model).filter(stripe_subscription_id=subscription_id).first()

    def find_model_customer(self, obj):
        """Fetch the model's Stripe customer (backs the stripe_customer
        cached_property).

        Returns None when the model has no customer id; if the stored id is
        stale, a fresh customer may be auto-created.
        """
        if not obj.stripe_customer_id:
            return
        try:
            return stripe.Customer.retrieve(obj.stripe_customer_id)
        except stripe.error.InvalidRequestError:
            if self.options['auto_create_customer']:
                return self.create_customer(obj, email=getattr(obj, self.options['email_attribute']))
            return

    def find_model_subscription(self, obj):
        """Fetch the model's Stripe subscription, or None when missing or
        no longer retrievable."""
        if not obj.stripe_customer_id or not obj.stripe_subscription_id:
            return
        try:
            return obj.stripe_customer.subscriptions\
                .retrieve(obj.stripe_subscription_id)
        except stripe.error.InvalidRequestError:
            return

    def find_model_default_source(self, obj):
        """Fetch the customer's default payment source, or None."""
        if not obj.stripe_customer_id:
            return
        default_id = obj.stripe_customer.default_source
        if default_id:
            return obj.stripe_customer.sources.retrieve(default_id)
    def check_model(self, obj):
        """Validate the model's billing state.

        May flash a message and return a redirect response when the model
        must choose a plan or add a payment source, or flash a warning when
        its subscription is past due. Returns None when everything is fine.
        """
        if not self.options['enable_subscriptions']:
            return
        # Auto-subscribe to the default plan when a plan is mandatory.
        if not obj.plan_name and self.options['must_have_plan'] and self.options['default_plan']:
            self.subscribe_plan(obj, self.options['default_plan'])
        # Redirect when a plan is required but missing, or when the current
        # plan is effectively unusable (no source / canceled / unpaid).
        if (not obj.plan_name and self.options['must_have_plan']) or \
           (obj.plan_name and ((obj.plan_status not in ('trialing', 'active') and not obj.has_stripe_source) or \
           obj.plan_status in ('canceled', 'unpaid'))):
            if self.options['no_payment_message']:
                flash(self.options['no_payment_message'], 'error')
            if self.options['no_payment_redirect_to']:
                return redirect(self.options['no_payment_redirect_to'])
            if self.options['add_source_view']:
                return redirect(url_for(self.options['add_source_view']))
        if obj.plan_status == 'past_due' and self.options['subscription_past_due_message']:
            flash(self.options['subscription_past_due_message'], 'error')
@hook()
def before_request(self):
if request.endpoint in (self.options['add_source_view'], 'users.logout') or 'static' in request.endpoint:
return
if current_app.features.users.logged_in() and self.model_is_user:
self.check_model(current_app.features.users.current)
    @action('stripe_model_create_customer', default_option='obj', as_='stripe_customer')
    @as_transaction
    def create_customer(self, obj, trial_end=None, coupon=None, tax_percent=None, **kwargs):
        """Create a Stripe customer for the model and persist its id.

        When a 'plan' kwarg is given the customer is created already
        subscribed; otherwise the configured default plan is used, if any.
        Returns the Stripe customer object.
        """
        if 'plan' in kwargs:
            kwargs.update(dict(trial_end=self._format_trial_end(trial_end),
                coupon=coupon, tax_percent=tax_percent))
        cust = stripe.Customer.create(**kwargs)
        self._update_model_customer(obj, cust)
        if 'plan' in kwargs:
            # Creating the customer with a plan also created a subscription.
            subscription = cust.subscriptions.data[0]
            self._update_model_subscription(obj, subscription)
        elif self.options['default_plan']:
            self.subscribe_plan(obj, self.options['default_plan'], trial_end=trial_end,
                coupon=coupon, tax_percent=tax_percent)
        save_model(obj)
        return cust
@action('stripe_model_update_customer')
@as_transaction
def update_customer(self, obj, **kwargs):
customer = obj.stripe_customer
for k, v in kwargs.iteritems():
setattr(customer, k, v)
customer.save()
self._update_model_customer(obj, customer)
save_model(obj)
@action('stripe_model_delete_customer')
@as_transaction
def delete_customer(self, obj, silent=True):
if obj.stripe_customer:
try:
obj.stripe_customer.delete()
except stripe.error.InvalidRequestError as e:
if not silent or 'No such customer' not in e.message:
raise e
self._update_model_customer(obj, None)
if obj.stripe_subscription_id:
self._update_model_subscription(obj, False)
save_model(obj)
    def _update_model_customer(self, obj, cust):
        """Sync the model's customer id and cached customer object, then
        refresh the source/billing fields. cust may be None to clear."""
        obj.stripe_customer_id = cust.id if cust else None
        # Prime the stripe_customer cached_property with the fresh object.
        obj.__dict__['stripe_customer'] = cust
        self._update_model_source(obj, cust)

    @action('stripe_model_add_source')
    @as_transaction
    def add_source(self, obj, token=None, **source_details):
        """Attach a new payment source to the customer, either from a Stripe
        token or from raw source details."""
        obj.stripe_customer.sources.create(source=token or source_details)
        obj.__dict__.pop('stripe_customer', None) # force refresh of customer object
        self._update_model_source(obj)
        save_model(obj)
@action('stripe_model_add_source_from_form')
def add_source_from_form(self, obj, form=None):
form = current_context.data.get('form')
if form and "stripeToken" in form:
self.add_source(obj, form.stripeToken.data)
elif "stripeToken" in request.form:
self.add_source(obj, request.form['stripeToken'])
elif form:
self.add_source(obj,
object="card",
number=form.card_number.data,
exp_month=form.card_exp_month.data,
exp_year=form.card_exp_year.data,
cvc=form.card_cvc.data,
name=form.card_name.data)
else:
raise Exception("No form found to retrieve the stripeToken")
@action('stripe_model_remove_source')
@as_transaction
def remove_source(self, obj, source_id=None):
if not source_id:
source_id = obj.stripe_customer.default_source
try:
source = obj.stripe_customer.sources.retrieve(source_id)
except stripe.error.InvalidRequestError:
return
source.delete()
obj.__dict__.pop('stripe_customer', None) # force refresh of customer object
self._update_model_source(obj)
    def _update_model_source(self, obj, customer=None, store_ip_address=True):
        """Refresh has_stripe_source, the billing_* snapshot columns and the
        EU VAT state from the customer's default payment source, then emit
        model_source_updated_signal."""
        if not customer:
            customer = obj.stripe_customer
        obj.has_stripe_source = customer.default_source is not None \
            if customer and not getattr(customer, 'deleted', False) else False
        # Drop the cached default source so it is re-fetched on next access.
        obj.__dict__.pop('stripe_default_source', None)
        if self.options['billing_fields'] and (obj.has_stripe_source or self.options['reset_billing_fields']):
            billing_fields = ('name', 'address_line1', 'address_line2', 'address_state', 'address_city',
                'address_zip', 'address_country', 'country', 'brand', 'exp_month', 'exp_year', 'last4')
            source = obj.stripe_default_source if obj.has_stripe_source else None
            # Copy the snapshot columns, or reset them all to None.
            for field in billing_fields:
                setattr(obj, 'billing_%s' % field, getattr(source, field) if source else None)
            if store_ip_address and obj.has_stripe_source:
                obj.billing_ip_address = request.remote_addr
        if self.options['eu_vat_support']:
            if self.options['eu_auto_vat_country']:
                country = None
                if obj.has_stripe_source:
                    if self.options['eu_vat_use_address_country']:
                        country = obj.stripe_default_source.address_country
                    else:
                        # Card-issuing country reported by Stripe.
                        country = obj.stripe_default_source.country
                current_app.features.eu_vat.set_model_country(obj, country)
            if self.options['eu_auto_vat_rate'] and obj.stripe_subscription and obj.should_charge_eu_vat:
                self.update_subscription(obj, tax_percent=obj.eu_vat_rate)
        self.model_source_updated_signal.send(obj)
    @action('stripe_create_charge', as_='charge')
    def create_charge(self, source, amount, currency=None, invoice_customer=None, invoice_lines=None,
                      invoice_tax_rate=None, **kwargs):
        """Create a Stripe charge (amount in cents).

        On failure the charge error is stored in the context and the action
        exits via the 'charge_failed' action group. When configured and the
        invoicing feature is loaded, a matching local invoice is created.
        """
        if currency is None:
            if not self.options['default_currency']:
                raise Exception('Missing currency')
            currency = self.options['default_currency']
        try:
            charge = stripe.Charge.create(amount=int(amount), currency=currency,
                source=source, **kwargs)
        except stripe.error.CardError as e:
            current_context['charge_error'] = e.json_body['error']
            # exit() aborts the action — control does not fall through.
            current_context.exit(trigger_action_group='charge_failed')
        except Exception as e:
            # NOTE(review): e.message is Python-2 only; use str(e) if this
            # module is ever ported to Python 3.
            current_context['charge_error'] = {'message': e.message}
            current_context.exit(trigger_action_group='charge_failed')
        if self.options['create_charge_invoice'] and 'invoicing' in current_app.features:
            current_context['invoice'] = self.create_invoice_from_charge(charge, obj=invoice_customer,
                lines=invoice_lines, tax_rate=invoice_tax_rate)
        return charge
@action('stripe_model_create_charge', as_='charge')
def create_customer_charge(self, obj, amount):
return self.create_charge(None, amount, customer=obj.stripe_customer.id,
invoice_customer=obj, **kwargs)
    @action('stripe_model_subscribe_plan', as_='subscription')
    @as_transaction
    def subscribe_plan(self, obj, plan=None, quantity=1, **kwargs):
        """Subscribe the model's customer to a plan (the default plan when
        none is given); a no-op when the model is already on that plan.

        Unless a tax_percent is supplied, it defaults to the model's EU VAT
        rate or to default_subscription_tax_percent. Returns the new
        subscription (or None on the no-op path).
        """
        if not plan:
            plan = self.options['default_plan']
        if obj.plan_name == plan:
            return
        params = dict(plan=plan, quantity=quantity,
            trial_end=self._format_trial_end(kwargs.pop('trial_end', None)))
        params.update(kwargs)
        if 'tax_percent' not in params:
            if self.options['eu_vat_support'] and self.options['eu_auto_vat_rate']:
                if obj.should_charge_eu_vat:
                    params['tax_percent'] = obj.eu_vat_rate
            elif self.options['default_subscription_tax_percent']:
                params['tax_percent'] = self.options['default_subscription_tax_percent']
        subscription = obj.stripe_customer.subscriptions.create(**params)
        self._update_model_subscription(obj, subscription)
        save_model(obj)
        return subscription
def _format_trial_end(self, trial_end=None):
if self.options['debug_trial_period'] and current_app.debug:
if self.options['debug_trial_period'] == 'now':
return 'now'
else:
trial_end = datetime.datetime.now() + \
datetime.timedelta(days=self.options['debug_trial_period'])
if isinstance(trial_end, datetime.datetime):
if trial_end <= datetime.datetime.now():
return 'now'
return int(time.mktime(trial_end.timetuple()))
return trial_end
@action('stripe_model_update_subscription')
@as_transaction
def update_subscription(self, obj, **kwargs):
subscription = obj.stripe_subscription
for k, v in kwargs.iteritems():
setattr(subscription, k, v)
subscription.save()
self._update_model_subscription(obj, subscription)
save_model(obj)
@action('stripe_model_cancel_subscription', default_option='obj')
@as_transaction
def cancel_subscription(self, obj):
obj.stripe_subscription.delete()
self._update_model_subscription(obj, False)
save_model(obj)
    def _update_model_subscription(self, obj, subscription=None):
        """Sync the model's plan columns from a subscription object.

        subscription=None fetches the customer's first subscription; a falsy
        non-None value (False) clears the local subscription state. Emits
        model_subscription_updated_signal with the previous plan/status.
        """
        if subscription is None:
            if obj.stripe_customer.subscriptions.total_count > 0:
                subscription = obj.stripe_customer.subscriptions.data[0]
        prev_plan = obj.plan_name
        prev_status = obj.plan_status
        if subscription:
            obj.stripe_subscription_id = subscription.id
            obj.plan_name = subscription.plan.id
            obj.plan_status = subscription.status
            if obj.plan_status == 'trialing':
                # The next charge happens when the trial expires.
                obj.plan_next_charge_at = datetime.datetime.fromtimestamp(subscription.trial_end)
            elif subscription.current_period_end:
                obj.plan_next_charge_at = datetime.datetime.fromtimestamp(subscription.current_period_end)
            else:
                obj.plan_next_charge_at = None
        else:
            obj.stripe_subscription_id = None
            obj.plan_name = None
            obj.plan_status = 'canceled'
            obj.plan_next_charge_at = None
        self.model_subscription_updated_signal.send(obj, prev_plan=prev_plan, prev_status=prev_status)
@as_transaction
def update_last_subscription_charge(self, obj, invoice):
obj.plan_last_charged_at = datetime.datetime.fromtimestamp(invoice.date)
obj.plan_last_charge_amount = invoice.total / 100
obj.plan_last_charge_successful = invoice.paid
if invoice.paid:
obj.plan_next_charge_at = datetime.datetime.fromtimestamp(obj.stripe_subscription.current_period_end)
elif invoice.next_payment_attempt:
obj.plan_next_charge_at = datetime.datetime.fromtimestamp(invoice.next_payment_attempt)
else:
obj.plan_next_charge_at = None
self.model_last_charge_updated_signal.send(obj)
save_model(obj)
    @as_transaction
    def on_source_event(self, sender, stripe_event):
        """Webhook handler: a customer's source changed — resync the model."""
        source = stripe_event.data.object
        obj = self.find_model_by_customer_id(source.customer)
        if not obj:
            return
        # Don't record request.remote_addr here: this request comes from
        # Stripe's webhook servers, not from the customer.
        self._update_model_source(obj, store_ip_address=False)
        save_model(obj)

    @as_transaction
    def on_subscription_event(self, sender, stripe_event):
        """Webhook handler: a subscription was updated/deleted — resync."""
        subscription = stripe_event.data.object
        obj = self.find_model_by_customer_id(subscription.customer)
        if not obj:
            return
        self._update_model_subscription(obj, subscription)
        save_model(obj)

    def on_trial_will_end(self, sender, stripe_event):
        """Webhook handler: email the customer that their trial is ending."""
        subscription = stripe_event.data.object
        obj = self.find_model_by_subscription_id(subscription.id)
        if not obj:
            return
        if self.options['send_trial_will_end_email']:
            current_app.features.emails.send(getattr(obj, self.options['email_attribute']),
                'stripe/trial_will_end.txt', obj=obj)
    @as_transaction
    def on_invoice_payment(self, sender, stripe_event):
        """Webhook handler for invoice payment success/failure.

        Updates the matching model's last-charge state, annotates the Stripe
        invoice with EU VAT metadata, mirrors paid invoices locally and
        emails the customer about failed ones. Zero-total invoices are
        ignored.
        """
        invoice = stripe_event.data.object
        if not invoice.customer:
            return
        obj = self.find_model_by_customer_id(invoice.customer)
        if not obj or invoice.total == 0:
            return
        if invoice.subscription:
            # The invoice's subscription may belong to a different model
            # than the invoice's customer.
            sub_obj = None
            if obj.stripe_subscription_id == invoice.subscription:
                sub_obj = obj
            else:
                sub_obj = self.find_model_by_subscription_id(invoice.subscription)
            if sub_obj:
                self.update_last_subscription_charge(sub_obj, invoice)
        if self.options['eu_vat_support'] and self.options['billing_fields'] and\
           current_app.features.eu_vat.is_eu_country(obj.billing_country):
            # Store the VAT amount converted with the country's exchange rate.
            # NOTE(review): uses current_app.services.eu_vat here but
            # current_app.features.eu_vat elsewhere — confirm both exist.
            invoice.metadata['eu_vat_exchange_rate'] = current_app.services.eu_vat.get_exchange_rate(
                obj.billing_country, invoice.currency.upper())
            if invoice.tax:
                invoice.metadata['eu_vat_amount'] = round(invoice.tax * invoice.metadata['eu_vat_exchange_rate'])
            if obj.eu_vat_number:
                invoice.metadata['eu_vat_number'] = obj.eu_vat_number
            invoice.save()
        self.invoice_payment_signal.send(invoice)
        if invoice.paid and current_app.features.exists('invoicing'):
            self.create_invoice_from_stripe(obj, invoice)
        elif not invoice.paid and self.options['send_failed_invoice_email']:
            self.send_failed_invoice_email(getattr(obj, self.options['email_attribute']), invoice)
@action('stripe_create_invoice_from_charge', default_option='charge', as_='invoice')
def create_invoice_from_charge(self, charge, obj=None, lines=None, tax_rate=None):
with current_app.features.invoicing.create(**self.options['invoice_ref_kwargs']) as invoice:
invoice.currency = charge.currency.upper()
invoice.subtotal = charge.amount / 100.0
invoice.total = charge.amount / 100.0
invoice.description = charge.description
invoice.issued_at = datetime.datetime.fromtimestamp(charge.created)
invoice.charge_id = charge.id
invoice.paid = charge.status == "succeeded"
if obj is not None:
self._fill_invoice_from_obj(invoice, obj)
if tax_rate and tax_rate == "eu_vat":
if current_app.features.eu_vat.is_eu_country(invoice.country):
tax_rate = current_app.services.eu_vat.get_vat_rate(invoice.country)
if not current_app.features.eu_vat.should_charge_vat(invoice.country, obj.eu_vat_number):
tax_rate = None
else:
tax_rate = None
if tax_rate:
invoice.tax_rate = tax_rate
invoice.subtotal = invoice.total * (100 / (100 + tax_rate));
invoice.tax_amount = invoice.total - invoice.subtotal
if lines:
for line in lines:
item = current_app.features.invoicing.item_model()
item.amount = line['amount']
item.quantity = line.get('quantity', 1)
item.currency = line.get('currency', charge.currency.upper())
item.description = line['description']
invoice.items.append(item)
def create_invoice_from_stripe(self, obj, stripe_invoice):
with current_app.features.invoicing.create(**self.options['invoice_ref_kwargs']) as invoice:
self._fill_invoice_from_obj(invoice, obj)
invoice.external_id = stripe_invoice.id
invoice.currency = stripe_invoice.currency.upper()
invoice.subtotal = stripe_invoice.subtotal / 100.0
invoice.total = stripe_invoice.total / 100.0
invoice.tax_rate = stripe_invoice.tax_percent
invoice.tax_amount = stripe_invoice.tax / 100.0 if stripe_invoice.tax else None
invoice.description = stripe_invoice.description
invoice.issued_at = datetime.datetime.fromtimestamp(stripe_invoice.date)
invoice.paid = stripe_invoice.paid
invoice.charge_id = stripe_invoice.charge
for line in stripe_invoice.lines.data:
item = current_app.features.invoicing.item_model()
item.external_id = line.id
item.amount = line.amount / 100.0
item.quantity = line.quantity
item.currency = line.currency
item.description = line.description or ''
invoice.items.append(item)
def _fill_invoice_from_obj(self, invoice, obj):
invoice.customer = obj
invoice.email = getattr(obj, self.options['email_attribute'])
if self.options['billing_fields']:
invoice.name = obj.billing_name
invoice.address_line1 = obj.billing_address_line1
invoice.address_line2 = obj.billing_address_line2
invoice.address_city = obj.billing_address_city
invoice.address_state = obj.billing_address_state
invoice.address_zip = obj.billing_address_zip
invoice.address_country = obj.billing_address_country
if obj.billing_country:
invoice.country = obj.billing_country.upper()
elif obj.billing_address_country:
invoice.country = obj.billing_address_country.upper()
def send_failed_invoice_email(self, email, invoice, **kwargs):
items = []
for line in invoice.lines.data:
items.append((line.description or '', line.quantity, line.amount / 100.0))
current_app.features.emails.send(email, 'failed_invoice.html',
invoice_date=datetime.datetime.fromtimestamp(invoice.date),
invoice_items=items,
invoice_currency=invoice.currency.upper(),
invoice_total=invoice.total / 100.0, **kwargs)
def on_model_eu_vat_rate_update(self, sender):
if sender.stripe_subscription:
sender.stripe_subscription.tax_percent = sender.eu_vat_rate
sender.stripe_subscription.save()
| {
"repo_name": "frascoweb/frasco-stripe",
"path": "frasco_stripe/__init__.py",
"copies": "1",
"size": "27615",
"license": "mit",
"hash": 6205671710279969000,
"line_mean": 47.1097560976,
"line_max": 125,
"alpha_frac": 0.6099945682,
"autogenerated": false,
"ratio": 4.034921098772648,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.003023015424819647,
"num_lines": 574
} |
from frasco import Feature, action, signal, current_app, command
from frasco_models import as_transaction, save_model, ref, transaction
import datetime
from contextlib import contextmanager
class InvoicingFeature(Feature):
    """Frasco feature storing invoices (with line items) via the models
    feature and emailing them to customers."""
    name = "invoicing"
    requires = ["models"]
    # send_email=None means "enable iff the emails feature is loaded".
    defaults = {"model": "Invoice",
                "item_model": "InvoiceItem",
                "send_email": None}
    # Emitted around persisting an invoice in save().
    invoice_issueing_signal = signal('invoice_issueing')
    invoice_issued_signal = signal('invoice_issued')
    def init_app(self, app):
        """Declare the invoice and invoice-item models and register the
        bundled email templates."""
        self.ref_creator_callback = self.create_ref
        self.model = app.features.models.ensure_model(self.options['model'],
            ref=dict(type=str, index=True),
            currency=str,
            subtotal=float,
            total=float,
            tax_rate=float,
            tax_amount=float,
            description=str,
            name=str,
            email=str,
            address_line1=str,
            address_line2=str,
            address_city=str,
            address_state=str,
            address_zip=str,
            address_country=str,
            country=str,
            customer_special_mention=str,
            issued_at=datetime.datetime,
            charge_id=str,
            external_id=str,
            customer=ref(),
            items=list)
        self.item_model = app.features.models.ensure_model(self.options['item_model'],
            amount=float,
            description=str,
            quantity=int,
            subtotal=float,
            currency=str,
            external_id=str)
        if app.features.exists("emails"):
            app.features.emails.add_templates_from_package(__name__)
            # Default invoice emailing to on only when emails are available.
            if self.options['send_email'] is None:
                self.options['send_email'] = True
    def ref_creator(self, func):
        """Decorator registering a custom invoice-reference generator."""
        self.ref_creator_callback = func
        return func

    def create_ref(self, category=None, counter=None, separator='-', merge_date=True):
        """Default reference generator.

        Joins the date (merged into one segment unless merge_date is False),
        an optional category and a counter (defaulting to the invoice count
        plus one) with the separator.
        """
        today = datetime.date.today()
        parts = [today.year, today.month, today.day]
        if merge_date:
            # e.g. 2024-01-05 -> "202415" — note there is no zero padding.
            parts = ["".join(map(str, parts))]
        if category:
            parts.append(category)
        if counter is None:
            counter = current_app.features.models.query(self.model).count() + 1
        parts.append(counter)
        return separator.join(map(str, parts))
    @contextmanager
    def create(self, **ref_kwargs):
        """Context manager yielding a fresh invoice with a generated ref;
        the invoice is saved (and possibly emailed) when the block exits."""
        invoice = self.model()
        invoice.ref = self.ref_creator_callback(**ref_kwargs)
        yield invoice
        self.save(invoice)

    def save(self, invoice):
        """Persist an invoice inside a transaction, emitting the issueing/
        issued signals, then email it when configured to."""
        with transaction():
            self.invoice_issueing_signal.send(invoice)
            save_model(invoice)
            self.invoice_issued_signal.send(invoice)
        if invoice.email and self.options['send_email']:
            self.send_email(invoice.email, invoice)
def send_email(self, email, invoice, **kwargs):
items = []
for item in invoice.items:
items.append((item.description, item.quantity, item.amount))
current_app.features.emails.send(email, 'invoice.html',
invoice=invoice,
invoice_date=invoice.issued_at,
invoice_items=items,
invoice_currency=invoice.currency.upper(),
invoice_total=invoice.total,
invoice_tax=invoice.tax_amount,
invoice_tax_rate=invoice.tax_rate,
**kwargs)
@command('send_email')
def send_email_command(self, invoice_id, email=None):
invoice = current_app.features.models.query(self.model).get(invoice_id)
self.send_email(email or invoice.email, invoice) | {
"repo_name": "frascoweb/frasco-invoicing",
"path": "frasco_invoicing/__init__.py",
"copies": "1",
"size": "3652",
"license": "mit",
"hash": 9068384569402114000,
"line_mean": 33.4622641509,
"line_max": 86,
"alpha_frac": 0.5873493976,
"autogenerated": false,
"ratio": 4.075892857142857,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.005032867053175487,
"num_lines": 106
} |
from frasco import Feature, action, signal, current_app, request, current_context
from frasco.utils import AttrDict, DictObject
from jinja2 import FileSystemLoader
import os
import inspect
from .form import *
class FormsFeature(Feature):
    """Create and use forms using wtforms.
    """
    name = "forms"
    defaults = {"import_macros": True,
                "csrf_enabled": True}
    form_validation_failed_signal = signal("form_validation_failed")
    form_submitted_signal = signal("form_submitted")
    form_created_from_view_signal = signal("form_created_from_view")

    def init_app(self, app):
        """Initialize the form registry and expose the bundled Jinja macros."""
        self.forms = {}
        if self.options["import_macros"]:
            macro_file = os.path.join(os.path.dirname(__file__), "macros.html")
            app.jinja_env.macros.register_file(macro_file, "form.html")
            # Keep "form" available as an alias of the form_tag macro.
            app.jinja_env.macros.alias("form_tag", "form")
    def init_declarative(self, app):
        """Declarative-mode hook: build form classes from template files."""
        self.load_forms_from_templates(app)

    def load_forms_from_templates(self, app, folder="forms", prefix="__forms__"):
        """Scan <app.root_path>/<folder>, register its templates under a
        dedicated jinja prefix and register a form class for each one."""
        path = os.path.join(app.root_path, folder)
        if os.path.exists(path):
            app.jinja_env.loader.add_prefix(prefix, FileSystemLoader(path))
            for form_class in FormLoader(path, prefix).load(app):
                self.register(form_class)
def register(self, form_class):
self.forms[form_class.__name__] = form_class
return form_class
def __getitem__(self, name):
return self.forms[name]
def __setitem__(self, name, form):
self.forms[name] = form
def __contains__(self, name):
return name in self.forms
@action(default_option="form", methods=("GET", "POST"), as_="form")
def form(self, obj=None, form=None, name=None, template=None, var_name=None, validate_on_submit=True,\
exit_on_failure=True, csrf_enabled=None):
"""Loads a form and validates it (unless specified).
If the form referenced has not been loaded, an attempt to create a form
object using the information in the template will be made.
"""
if not form or isinstance(form, str):
if not name and isinstance(form, str):
name = form
if not name:
name = request.endpoint.rsplit('.', 1)[1] if '.' in request.endpoint else request.endpoint
if name not in self.forms:
raise NoFormError("Cannot find form '%s'" % name)
form = self.forms[name]
if inspect.isclass(form):
if isinstance(obj, dict):
obj = DictObject(obj)
if csrf_enabled is None:
csrf_enabled = self.options["csrf_enabled"]
form = form(obj=obj, meta={'csrf': csrf_enabled})
current_context.data.form = form
yield form
if validate_on_submit and request.method == "POST":
self.validate(form, exit_on_failure=exit_on_failure)
@action("create_form_from_view", default_option="name", as_="form_class")
def create_from_view(self, name=None, template=None, var_name=None):
if not name:
name = request.endpoint.rsplit('.', 1)[1] if '.' in request.endpoint else request.endpoint
return self.forms[name]
@create_from_view.init_view
@form.init_view
def init_form_view(self, view, opts):
"""Checks if the form referenced in the view exists or attempts to
create it by parsing the template
"""
name = opts.get("name", opts.get("form"))
if isinstance(name, Form):
return
template = opts.get("template", getattr(view, "template", None))
if not template:
if not name:
raise NoFormError("No form name specified in the form action and no template")
return
try:
as_ = opts.get("var_name", getattr(self.form, "as_", "form"))
form_class = create_from_template(current_app, template, var_name=as_)
except NoFormError:
if not name:
raise
return
if not name:
name = view.name
self.forms[name] = form_class
self.form_created_from_view_signal.send(self, view=view, form_class=form_class)
return form_class
@action("create_form_from_template", as_="form_class")
def create_from_template(self, name, template, var_name="form"):
form_class = create_from_template(current_app, template, var_name=var_name)
self.forms[name] = form_class
return form_class
    @action("validate_form")
    def validate(self, form=None, return_success=False, exit_on_failure=True):
        """Validate a form and fire the matching signals / action groups.

        form defaults to the one stored on the current context. On failure
        the form_validation_failed signal fires; with exit_on_failure the
        action chain is aborted via ctx.exit.

        NOTE(review): when validation fails and exit_on_failure is False,
        control falls through to the success path (form_submitted signal /
        action group) — an explicit ``return False`` after the failure
        branch looks intended; confirm against upstream frasco-forms.
        """
        ctx = current_context
        if not form:
            form = ctx.data.form
        if not form.validate():
            self.form_validation_failed_signal.send(self, form=form)
            if exit_on_failure:
                # ctx.exit aborts the current action chain (does not return).
                ctx.exit(trigger_action_group="form_validation_failed")
            ctx.trigger_action_group("form_validation_failed")
        self.form_submitted_signal.send(self, form=form)
        if not return_success:
            ctx.trigger_action_group("form_submitted")
        return True
    @action("form_to_obj")
    def populate_obj(self, obj=None, form=None):
        """Populates an object with the form's data

        form defaults to the form stored on the current context; obj
        defaults to a fresh AttrDict. Returns the populated object.
        """
        if not form:
            form = current_context.data.form
        if obj is None:
            obj = AttrDict()
        form.populate_obj(obj)
        return obj | {
"repo_name": "frascoweb/frasco-forms",
"path": "frasco_forms/__init__.py",
"copies": "1",
"size": "5499",
"license": "mit",
"hash": -2908307197528036400,
"line_mean": 36.9310344828,
"line_max": 106,
"alpha_frac": 0.6032005819,
"autogenerated": false,
"ratio": 3.9222539229671898,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.502545450486719,
"avg_score": null,
"num_lines": null
} |
from frasco import Feature, current_app, action
from .backends import upload_backends, StorageBackend
from werkzeug import secure_filename, FileStorage
from flask import send_from_directory
import uuid
import os
from .utils import *
from io import BytesIO
from tempfile import TemporaryFile, NamedTemporaryFile, gettempdir
from flask.wrappers import Request
def _get_file_stream(self, total_content_length, content_type, filename=None, content_length=None):
if total_content_length > 1024 * 500:
return TemporaryFile('wb+', dir=os.environ.get('FRASCO_UPLOAD_TMP_DIR'))
return BytesIO()
Request._get_file_stream = _get_file_stream
class UploadFeature(Feature):
name = 'upload'
defaults = {"default_backend": "local",
"backends": {},
"upload_dir": "uploads",
"upload_url": "/uploads",
"upload_tmp_dir": None,
"uuid_prefixes": True,
"uuid_prefix_path_separator": False,
"keep_filenames": True,
"subfolders": False}
def init_app(self, app):
self.backends = {}
app.add_template_global(url_for_upload)
app.add_template_global(format_file_size)
def send_uploaded_file(filename):
return send_from_directory(self.options["upload_dir"], filename)
app.add_url_rule(self.options["upload_url"] + "/<path:filename>",
endpoint="static_upload",
view_func=send_uploaded_file)
def get_backend(self, name=None):
if isinstance(name, StorageBackend):
return name
if name is None:
name = self.options['default_backend']
if name not in self.backends:
backend = name
options = self.options
if name in self.options['backends']:
options = dict(self.options, **self.options['backends'][name])
backend = options.pop('backend')
if backend not in upload_backends:
raise Exception("Upload backend '%s' does not exist" % backend)
self.backends[name] = upload_backends[backend](options)
return self.backends[name]
def get_backend_from_filename(self, filename):
if '://' in filename:
return filename.split('://', 1)
return None, filename
    @action(default_option='filename')
    def generate_filename(self, filename, uuid_prefix=None, keep_filename=None, subfolders=None,
                          backend=None):
        """Derive the storage filename for an upload.

        All options default to the feature's configuration. With uuid_prefix
        and not keep_filename the original name is replaced by a random UUID
        (extension kept); otherwise the name is sanitized and optionally
        UUID-prefixed. subfolders shards files across 4 directory levels;
        backend=True/"name" prepends a "backend://" scheme.
        """
        if uuid_prefix is None:
            uuid_prefix = self.options["uuid_prefixes"]
        if keep_filename is None:
            keep_filename = self.options["keep_filenames"]
        if subfolders is None:
            subfolders = self.options["subfolders"]
        if uuid_prefix and not keep_filename:
            _, ext = os.path.splitext(filename)
            filename = str(uuid.uuid4()) + ext
        else:
            filename = secure_filename(filename)
            if uuid_prefix:
                # NOTE(review): reconstructed as part of the else-branch so a
                # pure-UUID name is not prefixed with a second UUID — source
                # indentation was lost; confirm against upstream frasco-upload.
                filename = str(uuid.uuid4()) + ("/" if self.options['uuid_prefix_path_separator'] else "-") + filename
        if subfolders:
            if uuid_prefix:
                # Shard on the first 4 dash-separated groups of the UUID.
                parts = filename.split("-", 4)
                filename = os.path.join(os.path.join(*parts[:4]), filename)
            else:
                # Shard on the first 4 characters of the sanitized name.
                filename = os.path.join(os.path.join(*filename[:4]), filename)
        if backend:
            if backend is True:
                backend = self.options['default_backend']
            filename = backend + '://' + filename
        return filename
def get_file_size(self, file):
file.seek(0, os.SEEK_END)
size = file.tell()
file.seek(0)
return size
@action(default_option='file')
def save_uploaded_file_temporarly(self, file, filename=None):
if filename:
tmpfilename = os.path.join(self.options['upload_tmp_dir'] or gettempdir(), filename.replace('/', '-'))
else:
_, ext = os.path.splitext(file.filename)
tmp = NamedTemporaryFile(delete=False, suffix=ext, dir=self.options['upload_tmp_dir'])
tmp.close()
tmpfilename = tmp.name
file.save(tmpfilename)
return tmpfilename
def upload(self, pathname, *args, **kwargs):
with open(pathname, 'rb') as f:
return self.save(FileStorage(f, kwargs.get('name', os.path.basename(pathname))), *args, **kwargs)
def save(self, file, filename=None, backend=None, **kwargs):
if not isinstance(file, FileStorage):
file = FileStorage(file)
if not filename:
filename = self.generate_filename(file.filename, backend=backend, **kwargs)
r = filename
if not backend or backend is True:
backend, filename = self.get_backend_from_filename(filename)
self.get_backend(backend).save(file, filename)
return r
def url_for(self, filename, backend=None, **kwargs):
if not backend:
backend, filename = self.get_backend_from_filename(filename)
return self.get_backend(backend).url_for(filename, **kwargs)
def delete(self, filename, backend=None, **kwargs):
if not backend:
backend, filename = self.get_backend_from_filename(filename)
self.get_backend(backend).delete(filename, **kwargs)
def url_for_upload(filename, **kwargs):
    # Template-global helper (registered in UploadFeature.init_app): builds
    # a public URL for an uploaded file via the upload feature's url_for.
    return current_app.features.upload.url_for(filename, **kwargs)
def format_file_size(size, suffix='B'):
    """Render a byte count as a human-readable string, e.g. 2048 -> '2.0KB'.

    Scales by powers of 1024 through the prefixes up to 'Z', then falls
    back to 'Y' for anything larger. Negative sizes keep their sign.
    """
    value = size
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value = value / 1024.0
    return "%.1f%s%s" % (value, 'Y', suffix)
# Optional integration with frasco-forms: when importable, register an
# "upload" form field type mapped to this package's FileField. Quietly
# skipped when frasco-forms is absent.
# NOTE: "import form" is a Python 2 implicit relative import of the local
# form module.
try:
    import frasco_forms.form
    import form
    frasco_forms.form.field_type_map.update({
        "upload": form.FileField})
except ImportError:
    pass
| {
"repo_name": "frascoweb/frasco-upload",
"path": "frasco_upload/__init__.py",
"copies": "1",
"size": "5908",
"license": "mit",
"hash": -1897133059630867200,
"line_mean": 36.1572327044,
"line_max": 118,
"alpha_frac": 0.595971564,
"autogenerated": false,
"ratio": 4.032764505119454,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5128736069119454,
"avg_score": null,
"num_lines": null
} |
from frasco import Feature, flash, url_for, lazy_translate, Blueprint, redirect, request, abort, signal, current_app
from frasco_users import current_user
from frasco_models import transaction
import requests
import json
def create_blueprint(app):
bp = Blueprint("slack", __name__)
feature = app.features.slack
users = app.features.users
@bp.route('/slack/authorize')
def login():
callback_url = url_for('.login_callback', next=request.args.get('next'), _external=True)
return feature.api.authorize(callback=callback_url)
@bp.route('/slack/authorize/callback')
def login_callback():
resp = feature.api.authorized_response()
if resp is None:
flash(feature.options["user_denied_login_message"], "error")
return redirect(url_for("users.login"))
with transaction():
users.current.slack_access_token = resp['access_token']
users.current.slack_team_name = resp['team_name']
users.current.slack_team_id = resp['team_id']
if 'incoming_webhook' in resp:
users.current.slack_incoming_webhook_url = resp['incoming_webhook']['url']
users.current.slack_incoming_webhook_channel = resp['incoming_webhook']['channel']
return redirect(request.args.get('next') or feature.options['default_redirect'])
@bp.route('/slack/command', methods=['POST'])
def command_callback():
r = feature.command_received_signal.send(feature,
token=request.form.get('token'),
team_id=request.form['team_id'],
team_domain=request.form.get('team_domain'),
channel_id=request.form.get('channel_id'),
channel_name=request.form.get('channel_name'),
user_id=request.form.get('user_id'),
user_name=request.form.get('user_name'),
command=request.form.get('command'),
text=request.form['text'],
response_url=request.form.get('response_url'))
if len(r) > 0 and r[0][1]:
data = r[0][1]
if not isinstance(data, (str, unicode)):
return json.dumps(data), {"Content-Type": "application/json"}
return data
return ""
return bp
class SlackFeature(Feature):
name = "slack"
requires = ["users"]
blueprints = [create_blueprint]
defaults = {"scope": "incoming-webhook,commands",
"user_denied_login_message": lazy_translate("Slack authorization was denied"),
"default_redirect": "index",
"use_user_model": True}
command_received_signal = signal('slack_command_received')
def init_app(self, app):
self.app = app
self.api = app.features.users.create_oauth_app("slack",
base_url='https://slack.com/api/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://slack.com/api/oauth.access',
authorize_url='https://slack.com/oauth/authorize',
consumer_key=self.options["client_id"],
consumer_secret=self.options["client_secret"],
request_token_params={'scope': self.options['scope']},
access_token_params={'client_id': self.options['client_id']})
if self.options['use_user_model']:
self.model = app.features.models.ensure_model(app.features.users.model,
slack_access_token=str,
slack_incoming_webhook_url=str,
slack_incoming_webhook_channel=str,
slack_team_name=str,
slack_team_id=dict(type=str, index=True))
def post_message(self, incoming_webhook_url, text, attachments=None, channel=None):
data = {"text": text}
if attachments:
data['attachments'] = attachments
for key in ("username", "icon_url", "icon_emoji"):
if key in self.options:
data[key] = self.options[key]
if channel:
data["channel"] = channel
return requests.post(incoming_webhook_url, json=data)
def respond_to_command(self, response_url, text, attachments=None, response_type=None):
return requests.post(response_url, json=self.format_command_response(text, attachments, response_type))
def format_command_response(self, text, attachments=None, response_type=None):
data = {"text": text}
if attachments:
data['attachments'] = attachments
if response_type:
data["response_type"] = response_type
return data
def parse_command(self, text, subcommand=True):
command = None
args = []
cur = 0
while cur < len(text):
if text[cur:cur+1] == '"':
next_cur = text.find('"', cur+1)
if next_cur == -1:
next_cur = None
next = text[cur+1:next_cur]
else:
next_cur = text.find(' ', cur)
if next_cur == -1:
next_cur = None
next = text[cur:next_cur]
if subcommand and command is None:
command = next
else:
args.append(next)
if next_cur is None:
break
cur = next_cur + 1
return (command, args) | {
"repo_name": "frascoweb/frasco-slack",
"path": "frasco_slack.py",
"copies": "1",
"size": "5365",
"license": "mit",
"hash": -4866841578800424000,
"line_mean": 38.7481481481,
"line_max": 116,
"alpha_frac": 0.578751165,
"autogenerated": false,
"ratio": 4.0643939393939394,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5143145104393939,
"avg_score": null,
"num_lines": null
} |
from frasco import Feature, hook, current_context
from frasco.templating import FileLoader
from jinja2 import PackageLoader
import os
class BootstrapFeature(Feature):
name = "bootstrap"
requires = ["assets"]
defaults = {"auto_assets": True,
"with_jquery": True,
"with_fontawesome": False,
"fluid_layout": False,
"bootstrap_version": "3.3.1",
"jquery_version": "2.1.1",
"fontawesome_version": "4.2.0"}
def init_app(self, app):
path = os.path.dirname(__file__)
app.jinja_env.macros.register_package(__name__, prefix="bootstrap")
app.jinja_env.loader.feature_loaders.append(PackageLoader(__name__))
app.jinja_env.loader.set_layout_alias("bootstrap_layout.html")
app.assets.register({
"bootstrap-cdn": [
"https://maxcdn.bootstrapcdn.com/bootstrap/%s/css/bootstrap.min.css" % self.options['bootstrap_version']],
"bootstrap-theme-cdn": [
"https://maxcdn.bootstrapcdn.com/bootstrap/%s/css/bootstrap-theme.min.css" % self.options['bootstrap_version']],
"bootstrap-js-cdn": [
"https://maxcdn.bootstrapcdn.com/bootstrap/%s/js/bootstrap.min.js" % self.options['bootstrap_version']],
"bootstrap-all-cdn": [
"@bootstrap-cdn",
"@bootstrap-theme-cdn",
"@bootstrap-js-cdn"],
"jquery-cdn": [
"https://code.jquery.com/jquery-%s.min.js" % self.options['jquery_version']],
"jquery-bootstrap-all-cdn": [
"@jquery-cdn",
"@bootstrap-all-cdn"],
"font-awesome-cdn": [
"https://maxcdn.bootstrapcdn.com/font-awesome/%s/css/font-awesome.min.css" % self.options['fontawesome_version']]})
if self.options["auto_assets"]:
if self.options["with_fontawesome"]:
app.features.assets.add_default("@font-awesome-cdn")
if self.options["with_jquery"]:
app.features.assets.add_default("@jquery-bootstrap-all-cdn")
else:
app.features.assets.add_default("@bootstrap-all-cdn")
@hook()
def before_request(self):
current_context["bs_layout_fluid"] = self.options["fluid_layout"] | {
"repo_name": "frascoweb/frasco-bootstrap",
"path": "frasco_bootstrap/__init__.py",
"copies": "1",
"size": "2343",
"license": "mit",
"hash": 1929709590734892500,
"line_mean": 43.2264150943,
"line_max": 131,
"alpha_frac": 0.5740503628,
"autogenerated": false,
"ratio": 3.8035714285714284,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.987233319064687,
"avg_score": 0.0010577201449115245,
"num_lines": 53
} |
from frasco import Feature, Service, action, hook, abort, current_app, request, jsonify, g, Blueprint, url_for, signal
from frasco_models import as_transaction, transaction, save_model
import datetime
import base64
import hashlib
import uuid
import re
import os
import json
import codecs
from apispec import APISpec
from flask_cors import CORS
class ApiService(Service):
@hook('before_request')
def set_api_flag(self):
g.is_api_call = True
class AuthenticatedApiService(ApiService):
@hook('before_request')
def authenticate_before_request(self):
if request.method != 'OPTIONS' and not current_app.features.users.logged_in():
return jsonify({"error": "Request an API key from your account"}), 401
# A Flask URL placeholder, e.g. "<user_id>" or "<int:user_id>": optional
# converter prefix in group 1, the argument name in group 2.
_url_arg_re = re.compile(r"<([a-z]+:)?([a-z0-9_]+)>")


def convert_url_args(url):
    """Rewrite Flask-style URL arguments to Swagger-style: "<int:id>" -> "{id}"."""
    return _url_arg_re.sub(lambda m: "{%s}" % m.group(2), url)
def convert_type_to_spec(type):
    """Map a Python type to its Swagger/OpenAPI primitive type name.

    Anything that is not int, float or bool (including None) maps to
    "string".
    """
    # Dict dispatch instead of an if-chain; type objects hash by identity,
    # so this matches the original `is` comparisons.
    return {int: "integer", float: "number", bool: "boolean"}.get(type, "string")
class ApiFeature(Feature):
name = 'api'
requires = ['models', 'users']
defaults = {"default_key_duration": None,
"allow_cross_requests": True,
"cors_options": {},
"cors_resources": {},
"cors_allow_services": True,
"spec_title": "API",
"spec_version": "1.0",
"swagger_client_var_name": "API",
"swagger_client_class_name": "SwaggerClient",
"swagger_client_filename": None,
"swagger_client_cdn_version": "master",
"static_dir": None,
"auto_assets": False}
def init_app(self, app):
if not self.options["static_dir"]:
self.options["static_dir"] = app.static_folder
if self.options['allow_cross_requests']:
resources = dict(**self.options['cors_resources'])
if self.options['cors_allow_services']:
resources[app.config['SERVICES_URL_PREFIX'] + '/*'] = {"origins": "*"}
cors = CORS(app, resources=resources, **self.options['cors_options'])
self.app = app
self.model = app.features.models.ensure_model('ApiKey',
user=app.features.users.model,
value=dict(type=str, index=True),
last_accessed_at=datetime.datetime,
last_accessed_from=str,
expires_at=datetime.datetime)
@app.features.users.login_manager.header_loader
def load_user_from_header(header_val):
header_val = header_val.replace('Basic ', '', 1)
try:
header_val = base64.b64decode(header_val)
key_value = header_val.split(':')[0]
except Exception:
return
key = app.features.models.find_first('ApiKey', value=key_value, not_found_404=False)
if key:
with transaction():
now = datetime.datetime.utcnow()
if key.expires_at and key.expires_at < now:
return None
key.last_accessed_at = now
key.last_accessed_from = request.remote_addr
save_model(key)
return key.user
self.register_spec_blueprint(app)
if "assets" in app.features:
app.assets.register({
"swagger-client-cdn": [
"https://cdn.rawgit.com/swagger-api/swagger-js/%s/browser/swagger-client.min.js" % self.options["swagger_client_cdn_version"]]})
if self.options['auto_assets']:
app.features.assets.add_default("@swagger-client-cdn")
if self.options["swagger_client_filename"]:
app.features.assets.register_assets_builder(self.write_swagger_client)
if self.options['auto_assets']:
app.features.assets.add_default(self.options["swagger_client_filename"])
@action('create_api_key', default_option='user', as_='api_key')
@as_transaction
def create_key(self, user=None, expires_at=None):
if not expires_at and self.options['default_key_duration']:
expires_at = datetime.datetime.now() + datetime.timedelta(
seconds=self.options['default_key_duration'])
key = self.model()
key.value = hashlib.sha1(str(uuid.uuid4)).hexdigest()
key.user = user or current_app.features.users.current
key.expires_at = expires_at
save_model(key)
return key
def register_spec_blueprint(self, app):
spec_bp = Blueprint("apispec", __name__)
cache = {}
@spec_bp.route("/spec.json")
def get_spec():
if "spec" not in cache:
cache["spec"] = self.build_spec().to_dict()
return jsonify(**cache["spec"])
@spec_bp.route("/swagger.js")
def get_swagger_client():
if "client" not in cache:
cache["client"] = self.build_swagger_client()
return cache["client"], 200, {'Content-Type': 'application/javascript'}
app.register_service_blueprint(spec_bp)
def build_spec(self):
spec = APISpec(title=self.options['spec_title'],
version=self.options['spec_version'],
basePath=self.app.config['SERVICES_URL_PREFIX'])
for name, srv in self.app.services.iteritems():
paths = {}
tag = {"name": name}
if srv.__doc__:
tag["description"] = srv.__doc__
spec.add_tag(tag)
for view in srv.views:
path = paths.setdefault(convert_url_args(view.url_rules[-1][0]), {})
for method in view.url_rules[-1][1].get('methods', ['GET']):
op = self.build_spec_operation(view, method)
op['tags'] = [name]
path[method.lower()] = op
for path, operations in paths.iteritems():
spec.add_path(path=path, operations=operations)
return spec
def build_spec_operation(self, view, method):
o = {"operationId": view.name,
"parameters": self.build_spec_params(view, method)}
if view.func.__doc__:
o['description'] = view.func.__doc__
return o
def build_spec_params(self, view, method='GET'):
params = []
if hasattr(view.func, 'request_params'):
url = convert_url_args(view.url_rules[-1][0])
for p in reversed(view.func.request_params):
for pname in p.names:
loc = "query"
if ("{%s}" % pname) in url:
loc = "path"
elif method.upper() in ("POST", "PUT"):
loc = "formData"
o = {"name": pname,
"type": convert_type_to_spec(p.type),
"required": p.required,
"in": loc}
if p.help:
o['description'] = p.help
params.append(o)
return params
def build_swagger_client(self):
return "function %s(options) { options['spec'] = %s; %s.call(this, '%s', options); } %s.prototype = new %s();" % (
self.options['swagger_client_var_name'],
json.dumps(self.build_spec().to_dict()),
self.options['swagger_client_class_name'],
url_for('apispec.get_spec', _external=True),
self.options['swagger_client_var_name'],
self.options['swagger_client_class_name']
)
def write_swagger_client(self):
filename = os.path.join(self.options["static_dir"], self.options["swagger_client_filename"])
with codecs.open(filename, "w", "utf-8") as f:
f.write(self.build_swagger_client()) | {
"repo_name": "frascoweb/frasco-api",
"path": "frasco_api.py",
"copies": "1",
"size": "7951",
"license": "mit",
"hash": 5090923792689427000,
"line_mean": 38.5621890547,
"line_max": 148,
"alpha_frac": 0.5468494529,
"autogenerated": false,
"ratio": 3.9814722083124687,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5028321661212468,
"avg_score": null,
"num_lines": null
} |
from frasco import (Feature, Service, action, signal, command, cached_property, expose,\
request_param, current_app, ServiceError, jsonify, lazy_translate)
from frasco_models import transaction, save_model
from suds.client import Client as SudsClient
from suds import WebFault
import requests
import xml.etree.ElementTree as ET
import datetime
import urllib2
EU_COUNTRIES = {
"AT": "EUR", # Austria
"BE": "EUR", # Belgium
"BG": "BGN", # Bulgaria
"DE": "EUR", # Germany
"CY": "EUR", # Cyprus
"CZ": "CZK", # Czech Republic
"DK": "DKK", # Denmark
"EE": "EUR", # Estonia
"ES": "EUR", # Spain
"FI": "EUR", # Finland
"FR": "EUR", # France,
"GB": "GBP", # Great Britain
"GR": "EUR", # Greece
"HR": "HRK", # Croatia
"HU": "HUF", # Hungary
"IE": "EUR", # Ireland
"IT": "EUR", # Italy
"LT": "EUR", # Lithuania
"LV": "EUR", # Latvia
"LU": "EUR", # Luxembourg
"MT": "EUR", # Malta
"NL": "EUR", # Netherlands
"PL": "PLN", # Poland
"PT": "EUR", # Portugal
"RO": "RON", # Romania
"SE": "SEK", # Sweden
"SI": "EUR", # Slovenia
"SK": "EUR" # Slovakia
}
KNOW_VAT_RATES = {
"AT": 20.0, # Austria
"BE": 21.0, # Belgium
"BG": 20.0, # Bulgaria
"DE": 19.0, # Germany
"CY": 19.0, # Cyprus
"CZ": 21.0, # Czech Republic
"DK": 25.0, # Denmark
"EE": 20.0, # Estonia
"ES": 21.0, # Spain
"FI": 24.0, # Finland
"FR": 20.0, # France,
"GB": 20.0, # Great Britain
"GR": 23.0, # Greece
"HR": 25.0, # Croatia
"HU": 27.0, # Hungary
"IE": 23.0, # Ireland
"IT": 22.0, # Italy
"LT": 21.0, # Lithuania
"LV": 21.0, # Latvia
"LU": 15.0, # Luxembourg
"MT": 18.0, # Malta
"NL": 21.0, # Netherlands
"PL": 23.0, # Poland
"PT": 23.0, # Portugal
"RO": 24.0, # Romania
"SE": 25.0, # Sweden
"SI": 22.0, # Slovenia
"SK": 20.0 # Slovakia
}
ECB_EUROFXREF_URL = 'http://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml'
ECB_EUROFXREF_XML_NS = 'http://www.ecb.int/vocabulary/2002-08-01/eurofxref'
VIES_SOAP_WSDL_URL = 'http://ec.europa.eu/taxation_customs/vies/checkVatService.wsdl'
TIC_SOAP_WSDL_URL = 'http://ec.europa.eu/taxation_customs/tic/VatRateWebService.wsdl'
_exchange_rates_cache = {}
_vat_rates_cache = {}
def is_eu_country(country_code):
    """Return True when country_code (any case) is an EU member-state code
    listed in EU_COUNTRIES; False for non-EU codes, None and empty strings.
    """
    # Normalize to a strict bool: the original returned the falsy argument
    # itself (None / ""), leaking non-boolean values to callers.
    return bool(country_code and country_code.upper() in EU_COUNTRIES)
def fetch_exchange_rates():
    """Fetch today's EUR-based exchange rates from the ECB daily XML feed.

    Results are memoized per calendar day in _exchange_rates_cache. On any
    failure (network, parse) the exception is logged and whatever was
    collected so far — at minimum {'EUR': 1.0} — is returned WITHOUT being
    cached, so the next call retries.
    """
    today = datetime.date.today()
    if today in _exchange_rates_cache:
        return _exchange_rates_cache[today]
    rates = {'EUR': 1.0}
    try:
        r = requests.get(ECB_EUROFXREF_URL)
        root = ET.fromstring(r.text)
        # The feed nests rates three Cube levels deep; each leaf carries
        # "currency" and "rate" attributes.
        for cube in root.findall('eu:Cube/eu:Cube/eu:Cube', {'eu': ECB_EUROFXREF_XML_NS}):
            rates[cube.attrib['currency']] = float(cube.attrib['rate'])
        _exchange_rates_cache[today] = rates
    except Exception as e:
        current_app.log_exception(e)
    return rates
# Process-wide lazily-built VIES SOAP client (VAT number validation).
VIESClient = None

def get_vies_soap_client():
    """Return the shared VIES SOAP client, creating it on first use.

    NOTE(review): no locking — concurrent first calls may each build a
    client; the last assignment wins.
    """
    global VIESClient
    if not VIESClient:
        VIESClient = SudsClient(VIES_SOAP_WSDL_URL)
    return VIESClient
# Process-wide lazily-built TIC SOAP client (EU VAT rates lookup).
TICClient = None

def get_ticc_soap_client():
    """Return the shared TIC SOAP client, creating it on first use.

    (Function name keeps the original "ticc" spelling; renaming would break
    callers.)
    """
    global TICClient
    if not TICClient:
        TICClient = SudsClient(TIC_SOAP_WSDL_URL)
    return TICClient
class EUVATService(Service):
name = 'eu_vat'
url_prefix = '/eu-vat'
@expose('/rates/<country_code>')
@request_param('country_code', type=str)
def get_vat_rate(self, country_code, rate_type=None):
country_code = country_code.upper()
if not is_eu_country(country_code):
raise ServiceError('Not an EU country', 404)
if not rate_type:
rate_type = current_app.features.eu_vat.options['vat_rate']
if country_code not in _vat_rates_cache:
_vat_rates_cache[country_code] = {}
try:
r = get_ticc_soap_client().service.getRates(dict(memberState=country_code,
requestDate=datetime.date.today().isoformat()))
for rate in r.ratesResponse.rate:
_vat_rates_cache[country_code][rate.type.lower()] = float(rate.value)
except Exception as e:
current_app.log_exception(e)
_vat_rates_cache.pop(country_code)
return current_app.features.eu_vat.options['backup_vat_rates'].get(country_code)
return _vat_rates_cache[country_code].get(rate_type.lower())
@expose('/validate-vat-number', methods=['POST'])
@request_param('vat_number', type=str)
def validate_vat_number(self, vat_number, raise_on_error=True):
if len(vat_number) < 3:
if raise_on_error:
raise ServiceError('VAT number too short', 400)
return False
try:
r = get_vies_soap_client().service.checkVat(vat_number[0:2].upper(), vat_number[2:])
return r.valid
except WebFault:
pass
return False
@expose('/exchange-rates/<country_code>', methods=['POST'])
@expose('/exchange-rates/<country_code>/<src_currency>', methods=['POST'])
@request_param('country_code', type=str)
@request_param('src_currency', type=str)
def get_exchange_rate(self, country_code, src_currency='EUR'):
if not is_eu_country(country_code):
raise ServiceError('Not an EU country', 404)
dest_currency = EU_COUNTRIES[country_code]
rates = fetch_exchange_rates()
if src_currency == dest_currency:
return 1.0
if src_currency == 'EUR':
return rates.get(dest_currency, 1.0)
if src_currency not in rates:
raise ServiceError('Can only use a currency listed in the ECB rates', 400)
return round(1.0 / rates[src_currency] * rates.get(dest_currency, 1.0), 5)
@expose('/check', methods=['POST'])
@request_param('country_code', type=str)
@request_param('vat_number', type=str)
@request_param('amount', type=float)
@request_param('src_currency', type=str)
def check(self, country_code, vat_number=None, amount=None, src_currency='EUR'):
if not is_eu_country(country_code):
raise ServiceError('Not an EU country', 404)
is_vat_number_valid = self.validate_vat_number(vat_number, False) if vat_number else False
o = {
"country": country_code,
"currency": EU_COUNTRIES[country_code],
"vat_rate": self.get_vat_rate(country_code),
"vat_number": vat_number,
"is_vat_number_valid": is_vat_number_valid,
"should_charge_vat": current_app.features.eu_vat.should_charge_vat(country_code, vat_number and is_vat_number_valid),
"exchange_rate": self.get_exchange_rate(country_code, src_currency),
"src_currency": src_currency
}
if amount:
rate = 0
if o['should_charge_vat']:
rate = o['vat_rate'] / 100
o.update({"amount": amount,
"vat_amount": round(amount * rate, 2),
"amount_with_vat": amount + amount * rate,
"exchanged_amount_with_vat": round((amount + amount * rate) * o["exchange_rate"], 2)})
return o
class EUVATFeature(Feature):
name = "eu_vat"
defaults = {"own_country": None,
"vat_rate": "standard",
"model": None,
"backup_vat_rates": KNOW_VAT_RATES,
"invoice_customer_mention_message": lazy_translate("VAT Number: {number}")}
model_rate_updated_signal = signal('vat_model_rate_updated')
rates_updated_signal = signal('vat_rates_updated')
def init_app(self, app):
app.register_service(EUVATService())
self.service = app.services.eu_vat
if self.options['model']:
self.model = app.features.models.ensure_model(self.options['model'],
eu_vat_country=str,
eu_vat_number=str,
eu_vat_rate=float)
self.model.should_charge_eu_vat = property(lambda s: self.should_charge_vat(s.eu_vat_country, s.eu_vat_number))
if app.features.exists('invoicing'):
app.features.models.ensure_model(app.features.invoicing.model,
is_eu_country=bool,
eu_vat_number=str,
eu_exchange_rate=float,
eu_vat_amount=float)
app.features.invoicing.invoice_issueing_signal.connect(self.on_invoice)
def is_eu_country(self, country_code):
return is_eu_country(country_code)
def should_charge_vat(self, country_code, eu_vat_number=None):
return is_eu_country(country_code) and (self.options['own_country'] == country_code\
or not eu_vat_number)
def set_model_country(self, obj, country_code):
if is_eu_country(country_code):
obj.eu_vat_country = country_code.upper()
obj.eu_vat_rate = self.service.get_vat_rate(obj.eu_vat_country)
else:
obj.eu_vat_country = None
obj.eu_vat_rate = None
@command()
def update_model_vat_rates(self):
with transaction():
query = current_app.features.models.query(self.model)
for country_code in EU_COUNTRIES:
rate = self.service.get_vat_rate(country_code)
for obj in query.filter(eu_vat_country=country_code, eu_vat_rate__ne=rate).all():
obj.eu_vat_rate = rate
self.model_rate_updated_signal.send(obj)
save_model(obj)
self.rates_updated_signal.send(self)
def on_invoice(self, sender):
if is_eu_country(sender.country):
sender.is_eu_country = True
if sender.customer:
sender.eu_vat_number = sender.customer.eu_vat_number
try:
sender.eu_exchange_rate = self.service.get_exchange_rate(sender.country, sender.currency)
if sender.tax_amount:
sender.eu_vat_amount = sender.tax_amount * sender.eu_exchange_rate
except Exception as e:
current_app.log_exception(e)
sender.eu_exchange_rate = None
if sender.eu_vat_number and self.options['invoice_customer_mention_message']:
sender.customer_special_mention = self.options['invoice_customer_mention_message'].format(
number=sender.eu_vat_number)
else:
sender.is_eu_country = False | {
"repo_name": "frascoweb/frasco-eu-vat",
"path": "frasco_eu_vat.py",
"copies": "1",
"size": "10536",
"license": "mit",
"hash": -914873355121614500,
"line_mean": 36.7670250896,
"line_max": 129,
"alpha_frac": 0.5864654518,
"autogenerated": false,
"ratio": 3.2669767441860467,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9305796338317189,
"avg_score": 0.009529171533771348,
"num_lines": 279
} |
from frasco import lazy_translate, current_app
from frasco.ext import *
from frasco.models import transaction
from flask.signals import Namespace as SignalNamespace
from .data import *
from .model import *
from .service import eu_vat_service
_signals = SignalNamespace()
model_rate_updated = _signals.signal('vat_model_rate_updated')
rates_updated = _signals.signal('vat_rates_updated')
class FrascoEUVAT(Extension):
    """Frasco extension wiring EU VAT helpers into an app: optional model
    integration, invoice enrichment via frasco_invoicing, and a CLI command
    to refresh stored VAT rates."""
    name = "frasco_eu_vat"
    defaults = {"own_country": None,
                "model": None,
                "invoice_customer_mention_message": lazy_translate("VAT Number: {number}")}

    def _init_app(self, app, state):
        if state.options['model']:
            # Resolve the dotted model path from config onto the state.
            state.Model = state.import_option('model')
        if has_extension('frasco_invoicing', app):
            from frasco.billing.invoicing import invoice_issued
            # NOTE(review): connecting a lambda with weak=True risks the
            # receiver being garbage-collected immediately (nothing else
            # references the lambda) — confirm this handler actually fires.
            invoice_issued.connect(lambda sender: update_invoice_with_eu_vat_info(sender), weak=True)

        @app.cli.command('update-eu-vat-rates')
        def update_model_vat_rates():
            # Refresh the stored VAT rate of every row whose country rate
            # changed; emits model_rate_updated per row and rates_updated
            # once at the end, all inside one transaction.
            with transaction():
                for country_code in EU_COUNTRIES:
                    rate = get_vat_rate(country_code)
                    for obj in state.Model.query.filter(state.Model._eu_vat_country == country_code, state.Model.eu_vat_rate != rate).all():
                        obj.eu_vat_rate = rate
                        model_rate_updated.send(obj)
                rates_updated.send()
def update_invoice_with_eu_vat_info(invoice):
    """Populate the EU VAT related fields of an invoice being issued."""
    state = get_extension_state('frasco_eu_vat')
    if not is_eu_country(invoice.country):
        invoice.is_eu_country = False
        return
    invoice.is_eu_country = True
    if invoice.customer:
        invoice.eu_vat_number = invoice.customer.eu_vat_number
    try:
        invoice.eu_exchange_rate = get_exchange_rate(invoice.country, invoice.currency)
        if invoice.tax_amount:
            invoice.eu_vat_amount = invoice.tax_amount * invoice.eu_exchange_rate
    except Exception as exc:
        # Best effort: a failed rate lookup must not abort invoice issuing.
        current_app.log_exception(exc)
        invoice.eu_exchange_rate = None
    mention = state.options['invoice_customer_mention_message']
    if invoice.eu_vat_number and mention:
        invoice.customer_special_mention = mention.format(number=invoice.eu_vat_number)
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/billing/eu_vat/__init__.py",
"copies": "1",
"size": "2354",
"license": "mit",
"hash": 4955261930317070000,
"line_mean": 39.5862068966,
"line_max": 140,
"alpha_frac": 0.6350892099,
"autogenerated": false,
"ratio": 3.790660225442834,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4925749435342834,
"avg_score": null,
"num_lines": null
} |
from frasco import lazy_translate, request, flash, redirect
from frasco.ext import *
from frasco.utils import unknown_value
from frasco.models import as_transaction
from frasco.mail import send_mail
import stripe
import datetime
import time
import os
import json
from frasco.billing.eu_vat import get_exchange_rate, is_eu_country, model_rate_updated
from frasco.billing.invoicing import send_failed_invoice_mail
from .signals import *
from .webhook import webhook_blueprint
from .invoice import *
from .model import *
# Pin the Stripe API version used for all requests.
STRIPE_API_VERSION = "2020-03-02"
# Do not send usage telemetry to Stripe.
stripe.enable_telemetry = False
class FrascoStripe(Extension):
    """Stripe billing integration for Frasco.

    Configures the stripe client, registers the webhook blueprint and
    connects Stripe webhook events to the handler functions below.
    Subscription and EU VAT hooks are enabled only when the configured
    model class supports them.
    """
    name = "frasco_stripe"
    defaults = {"default_currency": None,
                "default_plan": None,
                "no_payment_redirect_to": None,
                "no_payment_message": None,
                "subscription_past_due_message": lazy_translate(
                    "We attempted to charge your credit card for your subscription but it failed."
                    "Please check your credit card details"),
                "debug_trial_period": None,
                "send_trial_will_end_email": True,
                "send_failed_invoice_mail": True,
                "invoice_ref_kwargs": {},
                "webhook_validate_event": False}

    def _init_app(self, app, state):
        """Configure the stripe client and wire up all signal handlers."""
        stripe.api_key = state.require_option('api_key')
        stripe.api_version = STRIPE_API_VERSION
        state.Model = state.import_option('model')
        # Subscriptions are opt-in: they require a stripe_subscription_id attribute.
        state.subscription_enabled = hasattr(state.Model, 'stripe_subscription_id')
        app.register_blueprint(webhook_blueprint)

        if has_extension("frasco_mail", app):
            app.extensions.frasco_mail.add_templates_from_package(__name__)

        if has_extension('frasco_eu_vat', app) and hasattr(state.Model, '__stripe_has_eu_vat__'):
            # BUG FIX: this lambda was connected with weak=True; as nothing
            # else references the lambda it was garbage-collected right away
            # and the handler never fired. weak=False keeps it alive.
            model_rate_updated.connect(lambda sender: sender.update_stripe_subscription_tax_rates(), weak=False)

        stripe_event_signal('customer_updated').connect(on_customer_updated_event)
        stripe_event_signal('customer_deleted').connect(on_customer_deleted_event)
        stripe_event_signal('payment_method_attached').connect(on_payment_method_event)
        stripe_event_signal('payment_method_detached').connect(on_payment_method_event)
        stripe_event_signal('payment_method_updated').connect(on_payment_method_event)
        stripe_event_signal('payment_method_card_automatically_updated').connect(on_payment_method_event)
        stripe_event_signal('invoice_payment_succeeded').connect(on_invoice_payment)
        stripe_event_signal('invoice_payment_failed').connect(on_invoice_payment)

        if state.subscription_enabled:
            stripe_event_signal('customer_subscription_created').connect(on_subscription_event)
            stripe_event_signal('customer_subscription_updated').connect(on_subscription_event)
            stripe_event_signal('customer_subscription_deleted').connect(on_subscription_event)
            stripe_event_signal('customer_subscription_trial_will_end').connect(on_trial_will_end)
            stripe_event_signal('invoice_created').connect(on_subscription_invoice_created)
            if hasattr(state.Model, '__stripe_has_eu_vat__'):
                stripe_event_signal('customer_tax_id_created').connect(on_tax_id_event)
                stripe_event_signal('customer_tax_id_updated').connect(on_tax_id_event)
                stripe_event_signal('customer_tax_id_deleted').connect(on_tax_id_event)
@as_transaction
def on_customer_updated_event(sender, stripe_event):
    """Mirror a Stripe customer update onto the matching model row."""
    stripe_customer = stripe_event.data.object
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(stripe_customer.id).first()
    if not target:
        return
    target._update_stripe_customer(stripe_customer)
@as_transaction
def on_customer_deleted_event(sender, stripe_event):
    """Detach the Stripe customer from the matching model row on deletion."""
    stripe_customer = stripe_event.data.object
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(stripe_customer.id).first()
    if not target:
        return
    # False signals "customer gone" to _update_stripe_customer
    target._update_stripe_customer(False)
@as_transaction
def on_payment_method_event(sender, stripe_event):
    """Refresh cached customer data whenever a payment method changes."""
    payment_method = stripe_event.data.object
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(payment_method.customer).first()
    if target:
        target._update_stripe_customer()
@as_transaction
def on_tax_id_event(sender, stripe_event):
    """Re-sync the model's EU VAT number after a Stripe tax id event."""
    customer_id = stripe_event.data.object.customer
    target = get_extension_state('frasco_stripe').Model.query_by_stripe_customer(customer_id).first()
    if target:
        target.update_from_stripe_eu_vat_number()
@as_transaction
def on_subscription_event(sender, stripe_event):
    """Refresh the model's subscription state after any subscription event."""
    subscription = stripe_event.data.object
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(subscription.customer).first()
    if target:
        target._update_stripe_subscription()
@as_transaction
def on_subscription_invoice_created(sender, stripe_event):
    """Reset the pending invoice-items flag when a subscription invoice opens."""
    stripe_invoice = stripe_event.data.object
    # Only subscription invoices are of interest here
    if not stripe_invoice.subscription:
        return
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(stripe_invoice.customer).first()
    if not target:
        return
    target.plan_has_invoice_items = False
    model_subscription_invoice_created.send(target)
@as_transaction
def on_trial_will_end(sender, stripe_event):
    """Email the subscriber when Stripe announces the trial is about to end."""
    state = get_extension_state('frasco_stripe')
    subscription = stripe_event.data.object
    target = state.Model.query_by_stripe_subscription(subscription.id).first()
    if not (target and state.options['send_trial_will_end_email']):
        return
    send_mail(getattr(target, target.__stripe_email_property__), 'stripe/trial_will_end.txt', obj=target)
@as_transaction
def on_invoice_payment(sender, stripe_event):
    """Handle invoice payment success/failure webhooks.

    Updates the owning subscription's status, emits ``invoice_payment``
    and, when invoicing is enabled, either creates a local invoice or
    sends a payment-failure email.
    """
    stripe_invoice = stripe_event.data.object
    if not stripe_invoice.customer:
        return
    state = get_extension_state('frasco_stripe')
    target = state.Model.query_by_stripe_customer(stripe_invoice.customer).first()
    if not target or stripe_invoice.total == 0:
        return

    if stripe_invoice.subscription:
        # Resolve which object owns the subscription referenced by the invoice
        if target.stripe_subscription_id == stripe_invoice.subscription:
            sub_target = target
        else:
            sub_target = state.Model.query_by_stripe_subscription(stripe_invoice.subscription).first()
        if sub_target:
            sub_target.plan_status = sub_target.stripe_subscription.status
            sub_target.update_last_stripe_subscription_invoice(stripe_invoice)

    invoice_payment.send(stripe_invoice)

    if has_extension('frasco_invoicing'):
        if stripe_invoice.paid:
            create_invoice_from_stripe(target, stripe_invoice)
        elif state.options['send_failed_invoice_mail']:
            send_failed_invoice_mail(getattr(target, target.__stripe_email_property__), stripe_invoice)
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/billing/stripe/__init__.py",
"copies": "1",
"size": "6714",
"license": "mit",
"hash": 8777067766481382000,
"line_mean": 39.4457831325,
"line_max": 111,
"alpha_frac": 0.6869228478,
"autogenerated": false,
"ratio": 3.6429734129137277,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48298962607137275,
"avg_score": null,
"num_lines": null
} |
from frasco import request_param
from frasco.api import ApiService, ApiInputError, ApiNotFoundError
from . import data
# REST service exposing the EU VAT helpers under the /eu-vat URL prefix.
eu_vat_service = ApiService('eu_vat', url_prefix='/eu-vat')
@eu_vat_service.route('/rates/<country_code>')
@request_param('country_code', type=str)
def get_vat_rate(country_code):
    """Return the VAT rate for an EU country; domain errors become API errors."""
    try:
        rate = data.get_vat_rate(country_code)
    except data.EUVATError as exc:
        raise ApiInputError(str(exc))
    return rate
@eu_vat_service.route('/validate-vat-number', methods=['POST'])
@request_param('vat_number', type=str)
def validate_vat_number(vat_number):
    """Validate a VAT number; a malformed number raises an API input error."""
    try:
        result = data.validate_vat_number(vat_number, invalid_format_raise_error=True)
    except data.EUVATError as exc:
        raise ApiInputError(str(exc))
    return result
@eu_vat_service.route('/exchange-rates/<country_code>', methods=['POST'])
@eu_vat_service.route('/exchange-rates/<country_code>/<src_currency>', methods=['POST'])
@request_param('country_code', type=str)
@request_param('src_currency', type=str)
def get_exchange_rate(country_code, src_currency='EUR'):
    """Exchange rate from *src_currency* to the country's currency."""
    try:
        rate = data.get_exchange_rate(country_code, src_currency)
    except data.EUVATError as exc:
        raise ApiInputError(str(exc))
    return rate
@eu_vat_service.route('/check', methods=['POST'])
@request_param('country_code', type=str)
@request_param('vat_number', type=str)
@request_param('amount', type=float)
@request_param('src_currency', type=str)
def check(country_code, vat_number=None, amount=None, src_currency='EUR'):
    """Aggregate all VAT info for a country in a single call.

    Returns rate, VAT-number validity, whether VAT must be charged, the
    exchange rate and, when *amount* is given, the derived amounts.
    Raises ApiNotFoundError for non-EU countries.
    """
    if not data.is_eu_country(country_code):
        raise ApiNotFoundError('Not an EU country')
    vat_number_valid = data.validate_vat_number(vat_number) if vat_number else False
    result = {
        "country": country_code,
        "currency": data.EU_COUNTRIES[country_code],
        "vat_rate": data.get_vat_rate(country_code),
        "vat_number": vat_number,
        "is_vat_number_valid": vat_number_valid,
        "should_charge_vat": data.should_charge_vat(country_code, vat_number and vat_number_valid),
        # NOTE(review): looked up via the service object rather than
        # data.get_exchange_rate like the other fields — confirm intended.
        "exchange_rate": eu_vat_service.get_exchange_rate(country_code, src_currency),
        "src_currency": src_currency
    }
    if amount:
        rate = result['vat_rate'] / 100 if result['should_charge_vat'] else 0
        vat_amount = amount * rate
        result["amount"] = amount
        result["vat_amount"] = round(vat_amount, 2)
        result["amount_with_vat"] = amount + vat_amount
        result["exchanged_amount_with_vat"] = round((amount + vat_amount) * result["exchange_rate"], 2)
    return result
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/billing/eu_vat/service.py",
"copies": "1",
"size": "2530",
"license": "mit",
"hash": 7909503107000663000,
"line_mean": 36.2058823529,
"line_max": 106,
"alpha_frac": 0.6494071146,
"autogenerated": false,
"ratio": 3.320209973753281,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4469617088353281,
"avg_score": null,
"num_lines": null
} |
from frasco.models import db
from sqlalchemy.ext.declarative import declared_attr
from .data import is_eu_country, should_charge_vat, get_vat_rate
class EUVATModelMixin(object):
    """Model mixin storing a customer's EU VAT situation.

    All columns are deferred under the 'eu_vat' group so they load on
    demand. Assigning ``eu_vat_country`` also refreshes the cached VAT
    rate for that country.
    """
    @declared_attr
    def _eu_vat_country(cls):
        # Backing column for the eu_vat_country property below; the actual
        # database column name stays 'eu_vat_country'.
        return db.deferred(db.Column('eu_vat_country', db.String), group='eu_vat')

    @declared_attr
    def eu_vat_number(cls):
        # Customer-provided VAT number; no validation happens at this level.
        return db.deferred(db.Column(db.String), group='eu_vat')

    @declared_attr
    def eu_vat_rate(cls):
        # Cached VAT rate of the country, refreshed by the setter below.
        return db.deferred(db.Column(db.Float), group='eu_vat')

    def should_charge_eu_vat(self):
        """Whether VAT must be charged, given the country and VAT number."""
        return should_charge_vat(self.eu_vat_country, self.eu_vat_number)

    @property
    def eu_vat_country(self):
        return self._eu_vat_country

    @eu_vat_country.setter
    def eu_vat_country(self, value):
        # Only EU countries are stored; anything else clears both fields.
        if is_eu_country(value):
            self._eu_vat_country = value.upper()
            self.eu_vat_rate = get_vat_rate(self.eu_vat_country)
        else:
            self._eu_vat_country = None
            self.eu_vat_rate = None
class EUVATInvoiceModelMixin(object):
    """Invoice columns recording the EU VAT details captured at issue time."""
    # True when the invoice country is an EU member state.
    is_eu_country = db.Column(db.Boolean, default=False)
    # VAT number copied from the customer, if any.
    eu_vat_number = db.Column(db.String)
    # Exchange rate applied to convert the VAT amount (None if lookup failed).
    eu_exchange_rate = db.Column(db.Float)
    # Tax amount converted using eu_exchange_rate.
    eu_vat_amount = db.Column(db.Float, default=0)
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/billing/eu_vat/model.py",
"copies": "1",
"size": "1261",
"license": "mit",
"hash": -4773686099186008000,
"line_mean": 30.525,
"line_max": 82,
"alpha_frac": 0.6526566217,
"autogenerated": false,
"ratio": 3.144638403990025,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.929578900159364,
"avg_score": 0.00030120481927710846,
"num_lines": 40
} |
from frasco.models import db
class InvoiceModelMixin(object):
    """Columns shared by invoice models: amounts, billing details, identifiers."""
    # Invoice reference, indexed for lookups.
    ref = db.Column(db.String, index=True)
    currency = db.Column(db.String)
    subtotal = db.Column(db.Float)
    total = db.Column(db.Float)
    tax_amount = db.Column(db.Float)
    description = db.Column(db.String)
    name = db.Column(db.String)
    email = db.Column(db.String)
    # Billing address, one column per component.
    address_line1 = db.Column(db.String)
    address_line2 = db.Column(db.String)
    address_city = db.Column(db.String)
    address_state = db.Column(db.String)
    address_zip = db.Column(db.String)
    address_country = db.Column(db.String)
    country = db.Column(db.String)
    # Free-form text displayed to the customer (e.g. a VAT number mention).
    customer_special_mention = db.Column(db.String)
    issued_at = db.Column(db.DateTime)
    # Identifiers referencing the external billing system.
    charge_id = db.Column(db.String)
    external_id = db.Column(db.String)
class InvoiceItemModelMixin(object):
    """Columns shared by invoice line-item models."""
    amount = db.Column(db.Float)
    tax_amount = db.Column(db.Float)
    tax_rate = db.Column(db.Float)
    description = db.Column(db.String)
    quantity = db.Column(db.Integer)
    subtotal = db.Column(db.Float)
    currency = db.Column(db.String)
    # Identifier of the item in the external billing system.
    external_id = db.Column(db.String)
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/billing/invoicing/model.py",
"copies": "1",
"size": "1128",
"license": "mit",
"hash": -7306746708476978000,
"line_mean": 32.1764705882,
"line_max": 51,
"alpha_frac": 0.679964539,
"autogenerated": false,
"ratio": 3.1074380165289255,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4287402555528925,
"avg_score": null,
"num_lines": null
} |
from frasco.models import db, transaction
from frasco.models.utils import MutableList
from frasco import current_app
from frasco.ext import get_extension_state
from frasco.redis import redis
from flask_login import UserMixin
from sqlalchemy.dialects import postgresql
import datetime
import uuid
# Public API of this module: the user model mixins defined below.
__all__ = ('UserModelMixin', 'UserWithUsernameModelMixin', 'UserLastAccessAtModelMixin',
           'UserOTPCodeMixin', 'UserLoginModelMixin', 'UserEmailValidatedMixin', 'UserAuthTokenMixin')
class UserModelMixin(UserMixin):
    """Core user account columns and identifier lookups.

    Inherits Flask-Login's UserMixin for session integration.
    """
    email = db.Column(db.String)
    # Nullable: an account may exist without a local password.
    password = db.Column(db.String, nullable=True)
    last_password_change_at = db.Column(db.DateTime)
    last_password_change_from = db.Column(db.String)
    # Previously used passwords (Postgres string array).
    previous_passwords = db.Column(MutableList.as_mutable(postgresql.ARRAY(db.String)))
    must_reset_password_at_login = db.Column(db.Boolean, default=False)
    signup_at = db.Column(db.DateTime, default=datetime.datetime.utcnow)
    signup_from = db.Column(db.String)
    signup_provider = db.Column(db.String)
    signup_country = db.Column(db.String)
    last_login_at = db.Column(db.DateTime)
    last_login_from = db.Column(db.String)
    last_login_provider = db.Column(db.String)
    updated_at = db.Column(db.DateTime)
    # Names of the auth providers linked to this account.
    auth_providers = db.Column(MutableList.as_mutable(postgresql.ARRAY(db.String)), default=list)

    @classmethod
    def query_by_email(cls, email):
        """Query matching *email*, compared trimmed and lowercased.

        NOTE(review): the stored column is compared as-is, which assumes
        emails are stored lowercase — verify against the signup path.
        """
        return cls.query.filter(cls.email == email.strip().lower())

    @classmethod
    def query_by_identifier(cls, identifier):
        """Generic identifier lookup; here an identifier is the email."""
        return cls.query_by_email(identifier)
class UserWithUsernameModelMixin(UserModelMixin):
    """User model variant where accounts also have a unique username."""
    username = db.Column(db.String, unique=True)

    @classmethod
    def query_by_username(cls, username):
        """Case-insensitive username lookup (both sides lowercased)."""
        return cls.query.filter(db.func.lower(cls.username) == username.strip().lower())

    @classmethod
    def query_by_username_or_email(cls, identifier):
        """Match either the username (case-insensitive) or the email."""
        return cls.query.filter(db.or_(db.func.lower(cls.username) == identifier.strip().lower(),
                                       cls.email == identifier.strip().lower()))

    @classmethod
    def query_by_identifier(cls, identifier):
        """Identifier lookup; overrides the base to match the username only."""
        return cls.query_by_username(identifier)
class UserLastAccessAtModelMixin(object):
    """Tracks the day of a user's last access, deduplicated via a redis bitmap."""
    last_access_at = db.Column(db.Date)

    def update_last_access_at(self):
        """Record today's access for this user, writing to the DB at most once a day."""
        if not self.id:
            return
        redis_key = "users-last-access-%s" % datetime.date.today().isoformat()
        # The per-day bitmap (one bit per user id) avoids a DB write on
        # every request: only the first hit of the day goes through.
        if redis.getbit(redis_key, self.id):
            return
        with transaction():
            self.last_access_at = datetime.date.today()
            redis.setbit(redis_key, self.id, 1)
            # Keep the bitmap a bit longer than a day so it expires on its own.
            redis.expire(redis_key, 86500)
class UserOTPCodeMixin(object):
    """Columns supporting two-factor authentication via one-time passwords."""
    two_factor_auth_enabled = db.Column(db.Boolean, default=False)
    # Per-user OTP secret/code; unique across users.
    otp_code = db.Column(db.String, unique=True)
    otp_recovery_code = db.Column(db.String)
class UserLoginModelMixin(object):
    """Columns describing a single login event (time, origin, provider)."""
    login_at = db.Column(db.DateTime)
    login_from = db.Column(db.String)
    login_provider = db.Column(db.String)
    login_country = db.Column(db.String)
    login_user_agent = db.Column(db.String)
class UserEmailValidatedMixin(object):
    """Columns tracking whether (and when) the user's email was validated."""
    email_validated = db.Column(db.Boolean, default=False)
    email_validated_at = db.Column(db.DateTime)
class UserAuthTokenMixin(object):
    """Identify sessions by a revocable random token instead of the user id."""
    __token_identifier_property__ = 'auth_token'
    __session_cookie_identifier__ = 'auth_token'
    # Random per-user token; sessions store this value (see get_id), so
    # rotating it invalidates sessions that hold the old token.
    auth_token = db.Column(db.String, default=lambda: str(uuid.uuid4()), unique=True)

    @classmethod
    def query_by_auth_token(cls, token):
        """Query for the user owning *token* (exact match)."""
        return cls.query.filter(cls.auth_token == token)

    @classmethod
    def get_by_auth_token(cls, token):
        """Return the user owning *token*, or None."""
        return cls.query_by_auth_token(token).first()

    def get_id(self):
        # for Flask-Login: sessions are keyed on the token, not the primary key
        return self.auth_token

    def invalidate_auth_token(self):
        """Rotate the token; sessions holding the old token stop resolving."""
        self.auth_token = str(uuid.uuid4())
| {
"repo_name": "frascoweb/frasco",
"path": "frasco/users/model.py",
"copies": "1",
"size": "3830",
"license": "mit",
"hash": -8107963434983320000,
"line_mean": 33.5045045045,
"line_max": 102,
"alpha_frac": 0.6942558747,
"autogenerated": false,
"ratio": 3.4598012646793133,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4654057139379313,
"avg_score": null,
"num_lines": null
} |
from frasco_upload.backends import StorageBackend, file_upload_backend
from frasco import current_app
@file_upload_backend
class S3StorageBackend(StorageBackend):
    """Upload backend storing files on Amazon S3.

    Saves and deletions are offloaded to the task queue when the
    'upload_async' option is set and the tasks feature is available.
    """
    name = 's3'

    def save(self, file, filename, force_sync=False):
        """Store *file* under *filename* on S3, asynchronously when possible."""
        upload_kwargs = dict(
            filename=filename,
            content_disposition_filename=file.filename,
            bucket=self.get_option('upload_bucket'),
            acl=self.get_option('upload_acl'),
            prefix=self.get_option('upload_filename_prefix'))
        use_async = (not force_sync and self.get_option('upload_async')
                     and current_app.features.exists('tasks'))
        if use_async:
            # Persist the upload to a temp file first so the task worker can read it
            tmpname = current_app.features.upload.save_uploaded_file_temporarly(file, filename)
            current_app.features.tasks.enqueue('upload_file_to_s3',
                stream_or_filename=tmpname, mimetype=file.mimetype,
                delete_source=True, **upload_kwargs)
        else:
            current_app.features.aws.upload_file_to_s3(file, filename, **upload_kwargs)

    def url_for(self, filename, **kwargs):
        """Return a public or time-limited signed URL for *filename*."""
        bucket = self.get_option('upload_bucket')
        if not self.get_option('upload_signed_url'):
            return 'https://%s.s3.amazonaws.com/%s' % (bucket, filename)
        key = current_app.features.aws.s3_connection.get_bucket(bucket).get_key(filename)
        kwargs.setdefault('expires_in', self.get_option('upload_s3_urls_ttl'))
        return key.generate_url(**kwargs)

    def delete(self, filename, force_sync=False):
        """Delete *filename* from S3, asynchronously when possible."""
        if not force_sync and self.get_option('upload_async') and current_app.features.exists('tasks'):
            current_app.features.tasks.enqueue('delete_s3_file', filename=filename)
        else:
            current_app.features.aws.delete_s3_file(filename)

    def get_option(self, key):
        """Backend option with fallback to the global AWS feature options."""
        return self.options.get(key, current_app.features.aws.options.get(key))
"repo_name": "frascoweb/frasco-aws",
"path": "frasco_aws/upload.py",
"copies": "1",
"size": "1808",
"license": "mit",
"hash": 7708057467779292000,
"line_mean": 47.8918918919,
"line_max": 103,
"alpha_frac": 0.6675884956,
"autogenerated": false,
"ratio": 3.601593625498008,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4769182121098008,
"avg_score": null,
"num_lines": null
} |
from frbpoppy import hist
from goodness_of_fit import GoodnessOfFit
from matplotlib.lines import Line2D
from scipy.optimize import curve_fit
from tests.convenience import plot_aa_style, rel_path
import matplotlib.pyplot as plt
import numpy as np
class Plot():
    """Plot Monte Carlo goodness-of-fit runs, one panel per parameter.

    The ten panel methods previously duplicated the same ~25 lines each;
    they now delegate to the shared ``_plot_gofs`` helper and only supply
    per-panel configuration. Public method names and the produced figure
    are unchanged.
    """

    def __init__(self):
        plot_aa_style()
        plt.rcParams['figure.figsize'] = (5.75373, (5.75373/3)*4)
        plt.rcParams['font.size'] = 9
        self.fig, self.axes = plt.subplots(4, 3, sharey='row')
        self.colors = plt.rcParams['axes.prop_cycle'].by_key()['color']
        self.gf = GoodnessOfFit()
        self.df = self.gf.so.df

        # Calculate global maximums per run
        self.gm = {}
        for run in self.df.run.unique():
            self.gm[run] = self.gf.calc_global_max(run)
        print(self.gm)

        # Plot the parameter panels
        self.alpha()
        self.si()
        self.li()
        self.li_2()
        self.lum_min()
        self.lum_max()
        self.w_int_mean()
        self.w_int_std()
        self.legend()
        self.dm_igm_slope()
        self.dm_host()
        self.axes[3, 2].set_axis_off()

        # Save plot
        plt.tight_layout()
        plt.subplots_adjust(wspace=0.1)
        plt.savefig(rel_path('./plots/mc/mc.pdf'))

    def _plot_gofs(self, ax, parm, label, runs, xlabel=None, title_fmt=None,
                   log_x=False, ylabel=None, set_label=None):
        """Draw the GoF curves of one parameter on *ax*.

        :param ax: target matplotlib axes
        :param parm: dataframe column holding the parameter values
        :param label: (LaTeX) label of the parameter
        :param runs: run numbers to plot, one curve per run
        :param xlabel: x-axis label; defaults to *label*
        :param title_fmt: format spec for the best value shown in the panel
            title, or None to draw no title
        :param log_x: use a logarithmic x-axis
        :param ylabel: optional y-axis label
        :param set_label: optional right-hand 'Set N' annotation
        """
        if log_x:
            ax.set_xscale('log')
        if ylabel:
            ax.set_ylabel(ylabel)
        ax.set_yscale('log', nonposy='clip')
        ax.set_xlabel(xlabel if xlabel is not None else label)
        if set_label:
            # Twin axis used purely to carry the 'Set N' label on the right
            ax_right = ax.twinx()
            ax_right.set_ylabel(set_label, labelpad=10)
            ax_right.tick_params(axis='y', which='both', right=False,
                                 labelright=False)

        best_value = np.nan
        for i, run in enumerate(runs):
            bins, gofs = self.get_data(run, parm)
            ax.step(bins, gofs, where='mid')

            # Mark this run's global maximum, if known
            try:
                best_value, best_gof = self.gm[run][parm]
            except KeyError:
                continue
            ax.plot([best_value]*2, [1e-1, best_gof], color=self.colors[i],
                    linestyle='--')
            ax.scatter([best_value], [best_gof], marker='x',
                       color=self.colors[i])

            # Show the second run's best value in the panel title
            if title_fmt and i == 1 and not np.isnan(best_value):
                title = fr'{label}=${best_value:{title_fmt}}$'
                ax.set_title(title, fontsize=10, color=self.colors[i])

    def alpha(self):
        self._plot_gofs(self.axes[0, 0], 'alpha', r'$\alpha$', [1, 5, 8],
                        ylabel=r'GoF', title_fmt='.1f')

    def si(self):
        self._plot_gofs(self.axes[0, 1], 'si', r'\text{si}', [1, 5, 8],
                        title_fmt='.1f')

    def li(self):
        self._plot_gofs(self.axes[0, 2], 'li', 'li', [1, 5, 8],
                        title_fmt='.1f', set_label='Set 1')

    def li_2(self):
        self._plot_gofs(self.axes[1, 0], 'li', 'li', [2], ylabel=r'GoF')

    def lum_min(self):
        label = r'\text{lum$_{\text{min}}$}'
        self._plot_gofs(self.axes[1, 1], 'lum_min', label, [2], log_x=True,
                        xlabel=label + r' (erg s$^{-1}$)')

    def lum_max(self):
        label = r'\text{lum$_{\text{max}}$}'
        self._plot_gofs(self.axes[1, 2], 'lum_max', label, [2], log_x=True,
                        xlabel=label + r' (erg s$^{-1}$)', set_label='Set 2')

    def w_int_mean(self):
        label = r'\text{w$_{\text{int, mean}}$}'
        self._plot_gofs(self.axes[2, 0], 'w_mean', label, [3, 6, 9],
                        log_x=True, ylabel=r'GoF', xlabel=fr'{label} (ms)',
                        title_fmt='.1e')

    def w_int_std(self):
        label = r'\text{w$_{\text{int, std}}$}'
        self._plot_gofs(self.axes[2, 1], 'w_std', label, [3, 6, 9],
                        xlabel=fr'{label} (ms)', title_fmt='.1f',
                        set_label='Set 3')

    def dm_igm_slope(self):
        label = r'\text{DM$_{\text{IGM, slope}}$}'
        self._plot_gofs(self.axes[3, 0], 'dm_igm_slope', label, [4, 7, 10],
                        ylabel=r'GoF', title_fmt='.0f',
                        xlabel=label + r' ($\textrm{pc}\ \textrm{cm}^{-3}$)')

    def dm_host(self):
        label = r'\text{DM$_{\text{Host}}$}'
        self._plot_gofs(self.axes[3, 1], 'dm_host', label, [4, 7, 10],
                        title_fmt='.0f', set_label='Set 4',
                        xlabel=label + r' ($\textrm{pc}\ \textrm{cm}^{-3}$)')

    def legend(self):
        """Use the axes at (2, 2) purely as a canvas for the figure legend."""
        ax = self.axes[2, 2]
        elements = []
        line = Line2D([0], [0], color=self.colors[0])
        elements.append((line, r'Cycle 1'))
        line = Line2D([0], [0], color=self.colors[1])
        elements.append((line, r'Cycle 2'))
        line = Line2D([0], [0], color=self.colors[2])
        elements.append((line, r'Cycle 3'))
        line = Line2D([0], [0], color='grey', linestyle='--')
        elements.append((line, r'Max GoF'))
        lines, labels = zip(*elements)
        self.fig.legend(lines, labels, bbox_to_anchor=(0.84, 0.4),
                        loc='center')
        ax.set_axis_off()

    def get_data(self, run_number, par):
        """Return (bins, gofs) arrays for the step plot of one run/parameter."""
        df = self.df[self.df.run == run_number]
        if df.empty:
            return [np.nan], [np.nan]

        gofs = []
        bins = []
        for bin_val, group in df.groupby(par):
            gofs.append(self.gf.weighted_median(group))
            bins.append(bin_val)

        bins = np.array(bins)
        gofs = np.array(gofs)

        # Detect linear vs logarithmic sampling from the spacing of the first
        # bins. BUG FIX: guard against fewer than three bins, which used to
        # raise an IndexError on diff[1].
        diff = np.diff(bins)
        bin_type = 'lin'
        if len(diff) > 1 and not np.isclose(diff[0], diff[1]):
            bin_type = 'log'
        bins, gofs = self.gf.add_edges_to_hist(bins, gofs, bin_type=bin_type)

        # Panels use a log y-axis; map missing GoFs to the 1e-1 floor
        gofs[np.isnan(gofs)] = 1e-1
        return bins, gofs
if __name__ == '__main__':
    # Building the Plot instance draws and saves the figure as a side effect.
    Plot()
| {
"repo_name": "davidgardenier/frbpoppy",
"path": "tests/monte_carlo/plot.py",
"copies": "1",
"size": "13349",
"license": "mit",
"hash": 4566525575374997500,
"line_mean": 32.3725,
"line_max": 95,
"alpha_frac": 0.4967413289,
"autogenerated": false,
"ratio": 3.291173570019724,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4287914898919724,
"avg_score": null,
"num_lines": null
} |
from fred.clients.categories import CategoriesClient
from fred.clients.releases import ReleasesClient
from fred.clients.tags import TagsClient
from fred.clients.sources import SourcesClient
from fred.clients.eseries import ESeriesClient
import fred.config as c
import weakref
## Establish Federal Reserve Economic Data (Fred) wrapper for Python
class Fred(object):
    """
    Fred client. Provides a straightforward mapping from Python to FRED REST endpoints.

    The instance exposes the attributes ``category``, ``release``, ``series``,
    ``tag`` and ``source``, holding instances of
    :class:`fred.clients.categories.CategoriesClient`,
    :class:`fred.clients.releases.ReleasesClient`,
    :class:`fred.clients.eseries.ESeriesClient`,
    :class:`fred.clients.tags.TagsClient` and
    :class:`fred.clients.sources.SourcesClient` respectively. This is the
    preferred (and only supported) way to access those classes and their
    methods.

    :arg str api_key: 32 character alpha-numeric lowercase string. Required.
    :arg str realtime_start: The start of the real-time period. Format "YYYY-MM-DD"
    :arg str realtime_end: The end of the real-time period. Format "YYYY-MM-DD"
    :arg bool ssl_verify: To verify HTTPs.
    """
    def __init__(self,api_key=c.api_key,response_type=c.response_type, ssl_verify=c.ssl_verify):
        # Root URL of the FRED REST API
        self.url_root = 'https://api.stlouisfed.org/fred'
        # Normalize falsy values to None
        self.api_key = api_key or None
        self.response_type = response_type or None
        self.ssl_verify = ssl_verify
        # Every sub-client receives a weak proxy back to this instance so
        # the clients do not keep the parent alive, plus the shared config.
        parent = weakref.proxy(self)
        shared = (self.api_key, self.url_root, self.response_type, self.ssl_verify)
        self.category = CategoriesClient(parent, *shared)
        self.release = ReleasesClient(parent, *shared)
        self.series = ESeriesClient(parent, *shared)
        self.tag = TagsClient(parent, *shared)
        self.source = SourcesClient(parent, *shared)
| {
"repo_name": "avelkoski/FRB",
"path": "fred/__init__.py",
"copies": "1",
"size": "2309",
"license": "mit",
"hash": -1133619696383050200,
"line_mean": 53.9761904762,
"line_max": 123,
"alpha_frac": 0.7223906453,
"autogenerated": false,
"ratio": 3.5577812018489983,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47801718471489985,
"avg_score": null,
"num_lines": null
} |
from freebase.schema import dump_base, dump_type, restore
try:
import jsonlib2 as json
except ImportError:
try:
import simplejson as json
except ImportError:
import json
import sys
def cmd_dump_base(fb, baseid):
    """dump a base to stdout

    %prog dump_base baseid

    Dump a base by outputting a json representation
    of the types and properties involved.
    """
    # Python 2 print-chevron: write the pretty-printed JSON dump to stdout.
    print >> sys.stdout, json.dumps(dump_base(fb.mss, baseid), indent=2)
def cmd_dump_type(fb, typeid, follow_types=True):
    """dump a type to stdout

    %prog dump_type typeid [follow_types=True]

    Dump a type by outputting a json representation
    of the type and properties involved.
    """
    # Python 2 print-chevron: write the pretty-printed JSON dump to stdout.
    print >> sys.stdout, json.dumps(dump_type(fb.mss, typeid, follow_types), indent=2)
def cmd_restore(fb, newlocation, graphfile):
    """restore a graph object to the graph

    %prog restore newlocation graphfile

    Restore a graph object to the newlocation
    """
    # Fix: the original opened the file and only closed it after json.loads,
    # leaking the handle if reading or parsing raised. A with-block closes it
    # on every path (works on Python 2.6+ and 3.x alike).
    with open(graphfile, "r") as fh:
        graph = json.loads(fh.read())
    return restore(fb.mss, graph, newlocation, ignore_types=None)
| {
"repo_name": "tfmorris/freebase-python",
"path": "freebase/fcl/schema.py",
"copies": "5",
"size": "1121",
"license": "bsd-2-clause",
"hash": 6122798271702516000,
"line_mean": 26.3414634146,
"line_max": 86,
"alpha_frac": 0.6815343443,
"autogenerated": false,
"ratio": 3.6633986928104574,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6844933037110458,
"avg_score": null,
"num_lines": null
} |
from freemix.dataset.transform import RawTransformView, AkaraTransformClient
from freemix.dataset.augment import models
from freemix.dataset.augment import conf
from django.views.generic.base import View
from freemix.views import JSONResponse
class JSONView(View):
    """Base view that serves the payload produced by get_dict() as JSON."""

    # Optional template name handed through to JSONResponse (e.g. for JSONP).
    template = None

    def get_dict(self, *args, **kwargs):
        """Hook for subclasses: return the payload to be serialized."""
        return dict()

    def get(self, *args, **kwargs):
        """Handle GET by wrapping the subclass payload in a JSONResponse."""
        return JSONResponse(self.get_dict(*args, **kwargs), self.template)
class ListPatternJSONView(JSONView):
    """JSONView serving the dictionary form of all ListPattern records."""
    def get_dict(self, *args, **kwargs):
        return models.ListPattern.to_dict()
# Module-level view callables: JSONP (rendered through a JS template) and
# plain-JSON renderings of the pattern list.
pattern_jsonp = ListPatternJSONView.as_view(template="freemix/augment/patterns.js")
pattern_json = ListPatternJSONView.as_view()
class AugmentationErrorJSONView(JSONView):
    """JSONView serving the dictionary form of augmentation error codes."""
    def get_dict(self, *args, **kwargs):
        return models.AugmentationErrorCode.to_dict()
# Plain-JSON rendering of the augmentation error codes.
error_json = AugmentationErrorJSONView.as_view()

# Raw transform endpoint backed by the configured Akara augmentation service.
transform = RawTransformView.as_view(transform=AkaraTransformClient(conf.AKARA_AUGMENT_URL))
| {
"repo_name": "zepheira/freemix",
"path": "freemix/dataset/augment/views.py",
"copies": "1",
"size": "1026",
"license": "apache-2.0",
"hash": -6750666305143666000,
"line_mean": 26,
"line_max": 92,
"alpha_frac": 0.7456140351,
"autogenerated": false,
"ratio": 3.5136986301369864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9742256049899904,
"avg_score": 0.0034113230674164873,
"num_lines": 38
} |
from freenect import sync_get_depth as get_depth
import numpy as np
import pygame
from Xlib import X, display
import Xlib.XK
import Xlib.error
import Xlib.ext.xtest
class BlobAnalysis:
    """A reduced form of MATLAB's regionprops(): computes contour properties
    of a binary image and exposes them as fields.

    NOTE(review): relies on the legacy ``cv`` API (``cv.FindContours`` etc.);
    ``import cv`` must be in scope at file level -- confirm it is imported
    earlier in this file.
    """

    def __init__(self, BW):
        # BW is a binary image in the form of a numpy array.
        self.BW = BW
        # Find the external contours of the binary image.
        cs = cv.FindContours(cv.fromarray(self.BW.astype(np.uint8)), cv.CreateMemStorage(), mode=cv.CV_RETR_EXTERNAL)
        counter = 0
        # Dynamic lists accumulating per-blob properties.
        centroid = list()
        cHull = list()
        contours = list()
        cHullArea = list()          # fix: was declared as 'ChullArea', a different name
        contourArea = list()
        while cs:  # iterate through the CvSeq
            # Filter out contours smaller than 2000 pixels in area.
            # Fixes: 'asb' -> 'abs', missing ':' after the condition,
            # 'ContourArea.append' -> 'contourArea.append'.
            if abs(cv.ContourArea(cs)) > 2000:
                contourArea.append(cv.ContourArea(cs))
                m = cv.Moments(cs)
                try:
                    m10 = int(cv.GetSpatialMoment(m, 1, 0))
                    m00 = int(cv.GetSpatialMoment(m, 0, 0))
                    m01 = int(cv.GetSpatialMoment(m, 0, 1))
                    # Centroid from the spatial moments. The original line had
                    # three statements fused/garbled together; reconstructed
                    # from the intact English copy of this class in this file.
                    centroid.append((int(m10 / m00), int(m01 / m00)))
                    # Convex hull of the contour, and its area.
                    convexHull = cv.ConvexHull2(cs, cv.CreateMemStorage(), return_points=True)
                    cHullArea.append(cv.ContourArea(convexHull))
                    cHull.append(list(convexHull))
                    contours.append(list(cs))
                    counter += 1  # count how many blobs were kept
                except:
                    pass
            cs = cs.h_next()  # advance to the next contour in the CvSeq
        # Expose the accumulated properties as fields.
        self.centroid = centroid
        self.counter = counter
        self.cHull = cHull
        self.contours = contours
        self.cHullArea = cHullArea
        self.contourArea = contourArea
d = display.Display()  # Xlib display handle used for mouse manipulation
def move_mouse(x,y):
    # Move the pointer to (x, y); x and y are ints.
    s = d.screen()
    root = s.root
    root.warp_pointer(x,y)
    d.sync()
def click_down(button):
    # Simulate a button-press event; button is an int.
    Xlib.ext.xtest.fake_input(d,X.ButtonPress, button)
    d.sync()
def click_up(button):
    # Simulate a button-release event; button is an int.
    Xlib.ext.xtest.fake_input(d,X.ButtonRelease, button)
    d.sync()
def cacheAppendMean(cache, val):
    """Simple moving-average filter: push val, drop the oldest entry,
    and return the mean of the (mutated) cache."""
    cache.append(val)
    cache.pop(0)
    return np.mean(cache)
def hand_tracker():
    # Main GUI loop: displays the thresholded Kinect depth image with convex
    # hulls and centroids (pygame) and drives the mouse from the first blob.
    (depth,_) = get_depth()
    cHullAreaCache = constList(5,12000)  # cache for convex-hull-area smoothing
    areaRatioCache = constList(5,1)      # cache for contour/hull area-ratio smoothing
    centroidList = list()  # initialise centroid list
    # Basic RGB colours
    BLACK = (0,0,0)
    RED = (255,0,0)
    GREEN = (0,255,0)
    PURPLE = (255,0,255)
    BLUE = (0,0,255)
    WHITE = (255,255,255)
    YELLOW = (255,255,0)
    pygame.init()  # initialise pygame
    xSize,ySize = 640,480  # screen resolution
    screen = pygame.display.set_mode((xSize,ySize),pygame.RESIZABLE)  # main surface
    screenFlipped = pygame.display.set_mode((xSize,ySize),pygame.RESIZABLE)  # mirrored surface
    screen.fill(BLACK)  # black background
    done = False  # loop flag --> tells the program when to finish
    dummy = False
    while not done:
        screen.fill(BLACK)
        (depth,_) = get_depth()  # read depth frame from the Kinect
        depth = depth.astype(np.float32)  # convert the depth frame to 32-bit float
        _,depthThresh = cv2.threshold(depth, 600, 255, cv2.THRESH_BINARY_INV)  # foreground mask
        _,back = cv2.threshold(depth, 900, 255, cv2.THRESH_BINARY_INV)  # background mask
        blobData = BlobAnalysis(depthThresh)  # analyse foreground blobs
        blobDataBack = BlobAnalysis(back)  # analyse background blobs
        for cont in blobDataBack.contours:  # iterate the background contours
            pygame.draw.lines(screen,YELLOW,True,cont,3)
        for i in range(blobData.counter):  # iterate the detected foreground blobs
            pygame.draw.circle(screen,BLUE,blobData.centroid[i],10)
            centroidList.append(blobData.centroid[i])
            pygame.draw.lines(screen,RED,True,blobData.cHull[i],3)
            pygame.draw.lines(screen,GREEN,True,blobData.contours[i],3)
            for tips in blobData.cHull[i]:
                pygame.draw.circle(screen,PURPLE,tips,5)
        pygame.display.set_caption('Kinect Tracking')
        del depth  # release the frame before the next allocation
        screenFlipped = pygame.transform.flip(screen,1,0)  # mirror display
        screen.blit(screenFlipped,(0,0))
        pygame.display.flip()
        try:
            centroidX = blobData.centroid[0][0]
            centroidY = blobData.centroid[0][1]
            if dummy:
                mousePtr = display.Display().screen().root.query_pointer()._data #Gets current mouse attributes
                dX = centroidX - strX  # change in X since last frame
                dY = strY - centroidY  # change in Y (screen Y is inverted)
                # NOTE(review): if neither |dX| nor |dY| exceeds 1, mouseX/mouseY
                # may be unbound at move_mouse(); the bare except below swallows
                # that NameError -- confirm this is intended.
                if abs(dX) > 1:
                    mouseX = mousePtr["root_x"] - 2*dX
                if abs(dY) > 1:
                    mouseY = mousePtr["root_y"] - 2*dY
                move_mouse(mouseX,mouseY)
                strX = centroidX
                strY = centroidY
                cArea = cacheAppendMean(cHullAreaCache,blobData.cHullArea[0])  # smoothed hull area
                areaRatio = cacheAppendMean(areaRatioCache, blobData.contourArea[0]/cArea)  # smoothed area ratio
                if cArea < 10000 and areaRatio > 0.82:  # small, nearly-circular hand => click down
                    click_down(1)
                else:
                    click_up(1)
            else:
                strX = centroidX  # initialise starting X
                strY = centroidY  # initialise starting Y
                dummy = True
        except:  # no centroids: blobData.centroid[0] is out of range
            dummy = False
        for e in pygame.event.get():  # iterate pending events
            if e.type is pygame.QUIT:  # close button ends the loop
                done = True
# Kinect may not be plugged in; suppress Python-level errors so the
# libfreenect diagnostics are what the user sees.
try:
    hand_tracker()
except:
    pass
| {
"repo_name": "jhonduarte/kinnect",
"path": "examples/demo_control.py",
"copies": "1",
"size": "6175",
"license": "mit",
"hash": 998354736703652200,
"line_mean": 42.7375886525,
"line_max": 419,
"alpha_frac": 0.5673747365,
"autogenerated": false,
"ratio": 3.517969195664575,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4585343932164575,
"avg_score": null,
"num_lines": null
} |
from freenect import sync_get_depth as get_depth #Uses freenect to get depth information from the Kinect
import numpy as np #Imports NumPy
import cv,cv2 #Uses both of cv and cv2
import pygame #Uses pygame
#The libaries below are used for mouse manipulation
from Xlib import X, display
import Xlib.XK
import Xlib.error
import Xlib.ext.xtest
constList = lambda length, val: [val for _ in range(length)] #Gives a list of size length filled with the variable val. length is a list and val is dynamic
"""
This class is a less extensive form of regionprops() developed by MATLAB. It finds properties of contours and sets them to fields
"""
class BlobAnalysis:
    """A less extensive form of MATLAB's regionprops(): computes contour
    properties (centroid, hull, areas) of a binary image and exposes them
    as fields. Uses the legacy OpenCV ``cv`` API."""
    def __init__(self,BW): #Constructor. BW is a binary image in the form of a numpy array
        self.BW = BW
        cs = cv.FindContours(cv.fromarray(self.BW.astype(np.uint8)),cv.CreateMemStorage(),mode = cv.CV_RETR_EXTERNAL) #Finds the contours
        counter = 0
        """
        These are dynamic lists used to store variables
        """
        centroid = list()
        cHull = list()
        contours = list()
        cHullArea = list()
        contourArea = list()
        while cs: #Iterate through the CvSeq, cs.
            if abs(cv.ContourArea(cs)) > 2000: #Filters out contours smaller than 2000 pixels in area
                contourArea.append(cv.ContourArea(cs)) #Appends contourArea with newest contour area
                m = cv.Moments(cs) #Finds all of the moments of the filtered contour
                try:
                    m10 = int(cv.GetSpatialMoment(m,1,0)) #Spatial moment m10
                    m00 = int(cv.GetSpatialMoment(m,0,0)) #Spatial moment m00
                    m01 = int(cv.GetSpatialMoment(m,0,1)) #Spatial moment m01
                    # m00 is the blob area; division by zero lands in the except.
                    centroid.append((int(m10/m00), int(m01/m00))) #Appends centroid list with newest coordinates of centroid of contour
                    convexHull = cv.ConvexHull2(cs,cv.CreateMemStorage(),return_points=True) #Finds the convex hull of cs in type CvSeq
                    cHullArea.append(cv.ContourArea(convexHull)) #Adds the area of the convex hull to cHullArea list
                    cHull.append(list(convexHull)) #Adds the list form of the convex hull to cHull list
                    contours.append(list(cs)) #Adds the list form of the contour to contours list
                    counter += 1 #Adds to the counter to see how many blobs are there
                except:
                    pass
            cs = cs.h_next() #Goes to next contour in cs CvSeq
        """
        Below the variables are made into fields for referencing later
        """
        self.centroid = centroid
        self.counter = counter
        self.cHull = cHull
        self.contours = contours
        self.cHullArea = cHullArea
        self.contourArea = contourArea
d = display.Display() #Display reference for Xlib manipulation (shared by the mouse helpers below)
def move_mouse(x,y):#Moves the mouse to (x,y). x and y are ints
    s = d.screen()
    root = s.root
    root.warp_pointer(x,y)
    d.sync() # flush the request to the X server
def click_down(button):#Simulates a down click via XTest. Button is an int
    Xlib.ext.xtest.fake_input(d,X.ButtonPress, button)
    d.sync() # flush the request to the X server
def click_up(button): #Simulates a up click via XTest. Button is an int
    Xlib.ext.xtest.fake_input(d,X.ButtonRelease, button)
    d.sync() # flush the request to the X server
"""
The function below is a basic mean filter. It appends a cache list and takes the mean of it.
It is useful for filtering noisy data
cache is a list of floats or ints and val is either a float or an int
it returns the filtered mean
"""
def cacheAppendMean(cache, val):
    """Basic mean filter for noisy data: shift val into the fixed-size
    cache (oldest entry drops off) and return the cache's mean.

    cache is a list of floats/ints and val is a float or an int.
    The cache is mutated in place.
    """
    cache[:] = cache[1:] + [val]
    return np.mean(cache)
"""
This is the GUI that displays the thresholded image with the convex hull and centroids. It uses pygame.
Mouse control is also dictated in this function because the mouse commands are updated as the frame is updated
"""
def hand_tracker():
    # GUI loop: displays the thresholded depth image with convex hulls and
    # centroids via pygame; mouse control is updated once per rendered frame.
    (depth,_) = get_depth()
    cHullAreaCache = constList(5,12000) #Blank cache list for convex hull area
    areaRatioCache = constList(5,1) #Blank cache list for the area ratio of contour area to convex hull area
    centroidList = list() #Initiate centroid list
    #RGB Color tuples
    BLACK = (0,0,0)
    RED = (255,0,0)
    GREEN = (0,255,0)
    PURPLE = (255,0,255)
    BLUE = (0,0,255)
    WHITE = (255,255,255)
    YELLOW = (255,255,0)
    pygame.init() #Initiates pygame
    xSize,ySize = 640,480 #Sets size of window
    screen = pygame.display.set_mode((xSize,ySize),pygame.RESIZABLE) #creates main surface
    screenFlipped = pygame.display.set_mode((xSize,ySize),pygame.RESIZABLE) #creates surface that will be flipped (mirror display)
    screen.fill(BLACK) #Make the window black
    done = False #Iterator boolean --> Tells program when to terminate
    dummy = False #Very important bool for mouse manipulation
    while not done:
        screen.fill(BLACK) #Make the window black
        (depth,_) = get_depth() #Get the depth from the kinect
        depth = depth.astype(np.float32) #Convert the depth to a 32 bit float
        _,depthThresh = cv2.threshold(depth, 600, 255, cv2.THRESH_BINARY_INV) #Threshold the depth for a binary image. Thresholded at 600 arbitary units
        _,back = cv2.threshold(depth, 900, 255, cv2.THRESH_BINARY_INV) #Threshold the background in order to have an outlined background and segmented foreground
        blobData = BlobAnalysis(depthThresh) #Creates blobData object using BlobAnalysis class
        blobDataBack = BlobAnalysis(back) #Creates blobDataBack object using BlobAnalysis class
        for cont in blobDataBack.contours: #Iterates through contours in the background
            pygame.draw.lines(screen,YELLOW,True,cont,3) #Colors the binary boundaries of the background yellow
        for i in range(blobData.counter): #Iterate from 0 to the number of blobs minus 1
            pygame.draw.circle(screen,BLUE,blobData.centroid[i],10) #Draws a blue circle at each centroid
            centroidList.append(blobData.centroid[i]) #Adds the centroid tuple to the centroidList --> used for drawing
            pygame.draw.lines(screen,RED,True,blobData.cHull[i],3) #Draws the convex hull for each blob
            pygame.draw.lines(screen,GREEN,True,blobData.contours[i],3) #Draws the contour of each blob
            for tips in blobData.cHull[i]: #Iterates through the verticies of the convex hull for each blob
                pygame.draw.circle(screen,PURPLE,tips,5) #Draws the vertices purple
        """
        #Drawing Loop
        #This draws on the screen lines from the centroids
        #Possible exploration into gesture recognition :D
        for cent in centroidList:
            pygame.draw.circle(screen,BLUE,cent,10)
        """
        pygame.display.set_caption('Kinect Tracking') #Makes the caption of the pygame screen 'Kinect Tracking'
        del depth #Deletes depth --> opencv memory issue
        screenFlipped = pygame.transform.flip(screen,1,0) #Flips the screen so that it is a mirror display
        screen.blit(screenFlipped,(0,0)) #Updates the main screen --> screen
        pygame.display.flip() #Updates everything on the window
        #Mouse Try statement
        try:
            centroidX = blobData.centroid[0][0]
            centroidY = blobData.centroid[0][1]
            if dummy:
                mousePtr = display.Display().screen().root.query_pointer()._data #Gets current mouse attributes
                dX = centroidX - strX #Finds the change in X
                dY = strY - centroidY #Finds the change in Y
                # NOTE(review): when neither |dX| nor |dY| exceeds 1, mouseX/mouseY
                # may be unbound at move_mouse(); the bare except below masks it.
                if abs(dX) > 1: #If there was a change in X greater than 1...
                    mouseX = mousePtr["root_x"] - 2*dX #New X coordinate of mouse
                if abs(dY) > 1: #If there was a change in Y greater than 1...
                    mouseY = mousePtr["root_y"] - 2*dY #New Y coordinate of mouse
                move_mouse(mouseX,mouseY) #Moves mouse to new location
                strX = centroidX #Makes the new starting X of mouse to current X of newest centroid
                strY = centroidY #Makes the new starting Y of mouse to current Y of newest centroid
                cArea = cacheAppendMean(cHullAreaCache,blobData.cHullArea[0]) #Normalizes (gets rid of noise) in the convex hull area
                areaRatio = cacheAppendMean(areaRatioCache, blobData.contourArea[0]/cArea) #Normalizes the ratio between the contour area and convex hull area
                if cArea < 10000 and areaRatio > 0.82: #Defines what a click down is. Area must be small and the hand must look like a binary circle (nearly)
                    click_down(1)
                else:
                    click_up(1)
            else:
                strX = centroidX #Initializes the starting X
                strY = centroidY #Initializes the starting Y
                dummy = True #Lets the function continue to the first part of the if statement
        except: #There may be no centroids and therefore blobData.centroid[0] will be out of range
            dummy = False #Waits for a new starting point
        for e in pygame.event.get(): #Itertates through current events
            if e.type is pygame.QUIT: #If the close button is pressed, the while loop ends
                done = True
try: #Kinect may not be plugged in --> weird errors
    hand_tracker()
except: #Lets the libfreenect errors be shown instead of python ones
    pass
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/578104_OpenKinect_Mouse_Control_Using/recipe-578104.py",
"copies": "1",
"size": "9409",
"license": "mit",
"hash": -7824071109695744000,
"line_mean": 52.7657142857,
"line_max": 161,
"alpha_frac": 0.6568179403,
"autogenerated": false,
"ratio": 3.8154906731549065,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49723086134549066,
"avg_score": null,
"num_lines": null
} |
from freeradius.models import UserData, UserQuota, UserBillingDetail, UserInfo, RadPostAuth, RadReply, RadCheck, UserInfo
from rest_framework import serializers
class UserDataSerializer(serializers.ModelSerializer):
    """Serializes per-user traffic accounting records (UserData)."""
    class Meta:
        model = UserData
        fields = ('username', 'datain', 'dataout', 'totaldata', 'data_hour', 'date')
class UserQuotaSerializer(serializers.ModelSerializer):
    """Serializes per-user quota records (UserQuota)."""
    class Meta:
        model = UserQuota
        fields = ('username', 'quota_date', 'quota')
class UserBillingSerializer(serializers.ModelSerializer):
    """Serializes per-user billing detail records (UserBillingDetail)."""
    class Meta:
        model = UserBillingDetail
        fields = ('username', 'anniversary_day', 'action', 'status')
class UserInfoSerializer(serializers.ModelSerializer):
    """Serializes user contact details (UserInfo), limited field set.

    NOTE(review): this class is redefined later in this module with
    ``fields = ('__all__')``; that later definition shadows this one --
    confirm which version is intended.
    """
    class Meta:
        model = UserInfo
        fields = ('username', 'name', 'mail', 'department', 'workphone', 'homephone', 'mobile')
class RadPostAuthSerializer(serializers.ModelSerializer):
    """Serializes RADIUS post-auth log rows (RadPostAuth), all model fields."""
    class Meta:
        model = RadPostAuth
        # ('__all__') is just the string '__all__'; parentheses make no tuple.
        fields = ('__all__')
class RadCheckSerializer(serializers.ModelSerializer):
    """Serializes RADIUS check attributes (RadCheck), all model fields."""
    class Meta:
        model = RadCheck
        fields = ('__all__')
class RadReplySerializer(serializers.ModelSerializer):
    """Serializes RADIUS reply attributes (RadReply), all model fields."""
    class Meta:
        model = RadReply
        fields = ('__all__')
class UserInfoSerializer(serializers.ModelSerializer):
    """Serializes user contact details (UserInfo), all model fields.

    NOTE(review): second definition of ``UserInfoSerializer`` in this module;
    it shadows the field-limited version defined above.
    """
    class Meta:
        model = UserInfo
        fields = ('__all__')
| {
"repo_name": "realworldtech/radius_restserver",
"path": "src/freeradius/serializers.py",
"copies": "1",
"size": "1261",
"license": "mit",
"hash": -7521683231507072000,
"line_mean": 28.3255813953,
"line_max": 121,
"alpha_frac": 0.739888977,
"autogenerated": false,
"ratio": 3.5322128851540615,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4772101862154061,
"avg_score": null,
"num_lines": null
} |
from freesasa import *
import unittest
import math
import os
# this class tests using derived classes to create custom Classifiers
class DerivedClassifier(Classifier):
    """Custom classifier used to test subclassing of Classifier: returns a
    fixed class name and radius for every residue/atom pair."""
    # Marks this as a pure-Python classifier (tests assert _isCClassifier() is False).
    purePython = True

    def classify(self,residueName,atomName):
        # Constant class label regardless of input.
        return 'bla'

    def radius(self,residueName,atomName):
        # Constant radius regardless of input.
        return 10
class FreeSASATestCase(unittest.TestCase):
    """Unit tests for the freesasa Python bindings.

    Relies on fixture files under ``data/`` and configs under ``share/``;
    the __main__ guard below chdirs into this file's directory first.
    """

    def testParameters(self):
        # Fresh Parameters must match the documented defaults dict.
        d = defaultParameters
        p = Parameters()
        self.assertTrue(p.algorithm() == LeeRichards)
        self.assertTrue(p.algorithm() == d['algorithm'])
        self.assertTrue(p.probeRadius() == d['probe-radius'])
        self.assertTrue(p.nPoints() == d['n-points'])
        self.assertTrue(p.nSlices() == d['n-slices'])
        self.assertTrue(p.nThreads() == d['n-threads'])
        # Unknown option names are rejected, alone or mixed with valid ones.
        self.assertRaises(AssertionError,lambda: Parameters({'not-an-option' : 1}))
        self.assertRaises(AssertionError,lambda: Parameters({'n-slices' : 50, 'not-an-option' : 1}))
        self.assertRaises(AssertionError,lambda: Parameters({'not-an-option' : 50, 'also-not-an-option' : 1}))
        # Setters round-trip; invalid values raise AssertionError.
        p.setAlgorithm(ShrakeRupley)
        self.assertTrue(p.algorithm() == ShrakeRupley)
        p.setAlgorithm(LeeRichards)
        self.assertTrue(p.algorithm() == LeeRichards)
        self.assertRaises(AssertionError,lambda: p.setAlgorithm(-10))
        p.setProbeRadius(1.5)
        self.assertTrue(p.probeRadius() == 1.5)
        self.assertRaises(AssertionError,lambda: p.setProbeRadius(-1))
        p.setNPoints(20)
        self.assertTrue(p.nPoints() == 20)
        self.assertRaises(AssertionError,lambda: p.setNPoints(0))
        p.setNSlices(10)
        self.assertTrue(p.nSlices() == 10)
        self.assertRaises(AssertionError,lambda: p.setNSlices(0))
        p.setNThreads(2)
        self.assertTrue(p.nThreads() == 2)
        self.assertRaises(AssertionError, lambda: p.setNThreads(0))

    def testResult(self):
        # An empty Result has no areas to query.
        r = Result()
        self.assertRaises(AssertionError,lambda: r.totalArea())
        self.assertRaises(AssertionError,lambda: r.atomArea(0))

    def testClassifier(self):
        # Default (C) classifier: built-in polarity classes and radii.
        c = Classifier()
        self.assertTrue(c._isCClassifier())
        self.assertTrue(c.classify("ALA"," CB ") == apolar)
        self.assertTrue(c.classify("ARG"," NH1") == polar)
        self.assertTrue(c.radius("ALA"," CB ") == 1.88)
        # Invalid / missing config files raise.
        setVerbosity(silent)
        self.assertRaises(Exception,lambda: Classifier("data/err.config"))
        self.assertRaises(IOError,lambda: Classifier(""))
        setVerbosity(normal)
        # Custom config files define their own classes and radii.
        c = Classifier("data/test.config")
        self.assertTrue(c.classify("AA","aa") == "Polar")
        self.assertTrue(c.classify("BB","bb") == "Apolar")
        self.assertTrue(c.radius("AA","aa") == 1.0)
        self.assertTrue(c.radius("BB","bb") == 2.0)
        c = Classifier("share/oons.config")
        self.assertTrue(c.radius("ALA"," CB ") == 2.00)
        # Pure-Python subclass (defined above) is honoured.
        c = DerivedClassifier()
        self.assertTrue(not c._isCClassifier())
        self.assertTrue(c.radius("ALA"," CB ") == 10)
        self.assertTrue(c.radius("ABCDEFG","HIJKLMNO") == 10)
        self.assertTrue(c.classify("ABCDEFG","HIJKLMNO") == "bla")

    def testStructure(self):
        self.assertRaises(IOError,lambda: Structure("xyz#$%"))
        setVerbosity(silent)
        # test any file that's not a PDB file
        self.assertRaises(Exception,lambda: Structure("data/err.config"))
        self.assertRaises(Exception,lambda: Structure("data/empty.pdb"))
        self.assertRaises(Exception,lambda: Structure("data/empty_model.pdb"))
        setVerbosity(normal)
        # Default-classified ubiquitin structure: spot-check atom metadata.
        s = Structure("data/1ubq.pdb")
        self.assertTrue(s.nAtoms() == 602)
        self.assertTrue(s.radius(1) == 1.88)
        self.assertTrue(s.chainLabel(1) == 'A')
        self.assertTrue(s.atomName(1) == ' CA ')
        self.assertTrue(s.residueName(1) == 'MET')
        self.assertTrue(s.residueNumber(1) == ' 1')
        # Radii differ when an alternative classifier is supplied.
        s2 = Structure("data/1ubq.pdb",Classifier("share/oons.config"))
        self.assertTrue(s.nAtoms() == 602)
        self.assertTrue(math.fabs(s2.radius(1) - 2.0) < 1e-5)
        s2 = Structure("data/1ubq.pdb",Classifier("share/protor.config"))
        for i in range (0,601):
            self.assertTrue(math.fabs(s.radius(i)- s2.radius(i)) < 1e-5)
        self.assertRaises(Exception,lambda: Structure("data/1ubq.pdb","data/err.config"))
        # Manually-built structures.
        s = Structure()
        s.addAtom(' CA ','ALA',' 1','A',1,1,1)
        self.assertTrue(s.nAtoms() == 1)
        self.assertTrue(s.atomName(0) == ' CA ')
        self.assertTrue(s.residueName(0) == 'ALA')
        self.assertTrue(s.residueNumber(0) == ' 1')
        self.assertTrue(s.chainLabel(0) == 'A')
        self.assertTrue(s.nAtoms() == 1)
        x, y, z = s.coord(0)
        self.assertTrue(x == 1 and y ==1 and z ==1)
        # Integer residue numbers are accepted and stringified.
        s.addAtom(' CB ','ALA',2,'A',2,1,1)
        self.assertTrue(s.nAtoms() == 2)
        self.assertTrue(s.residueNumber(1) == '2')
        # Out-of-range atom indices raise.
        self.assertRaises(AssertionError, lambda: s.atomName(3))
        self.assertRaises(AssertionError, lambda: s.residueName(3))
        self.assertRaises(AssertionError, lambda: s.residueNumber(3))
        self.assertRaises(AssertionError, lambda: s.chainLabel(3))
        self.assertRaises(AssertionError, lambda: s.coord(3))
        self.assertRaises(AssertionError, lambda: s.radius(3))
        # Radius assignment: classifier-based, list-based, and per-atom.
        s.setRadiiWithClassifier(Classifier())
        self.assertTrue(s.radius(0) == 1.88)
        self.assertTrue(s.radius(1) == 1.88)
        s.setRadiiWithClassifier(DerivedClassifier())
        self.assertTrue(s.radius(0) == s.radius(1) == 10.0)
        s.setRadii([1.0,3.0])
        self.assertTrue(s.radius(0) == 1.0)
        self.assertTrue(s.radius(1) == 3.0)
        s.setRadius(0, 10.0)
        self.assertTrue(s.radius(0) == 10.0);
        self.assertRaises(AssertionError,lambda: s.setRadius(2,10));
        self.assertRaises(AssertionError,lambda: s.setRadii([1]))
        self.assertRaises(AssertionError,lambda: s.setRadii([1,2,3]))
        self.assertRaises(AssertionError,lambda: s.atomName(2))
        self.assertRaises(AssertionError,lambda: s.residueName(2))
        self.assertRaises(AssertionError,lambda: s.residueNumber(2))
        self.assertRaises(AssertionError,lambda: s.chainLabel(2))
        # Structure options: hydrogens, model joining, HETATM, unknown atoms.
        setVerbosity(nowarnings)
        s = Structure("data/1d3z.pdb",None,{'hydrogen' : True})
        self.assertTrue(s.nAtoms() == 1231)
        s = Structure("data/1d3z.pdb",None,{'hydrogen' : True, 'join-models' : True})
        self.assertTrue(s.nAtoms() == 12310)
        s = Structure("data/1ubq.pdb",None,{'hetatm' : True})
        self.assertTrue(s.nAtoms() == 660)
        s = Structure("data/1d3z.pdb",None,{'hydrogen' : True, 'skip-unknown' : True})
        self.assertTrue(s.nAtoms() == 602)
        setVerbosity(silent)
        self.assertRaises(Exception, lambda : Structure("data/1d3z.pdb",None,{'hydrogen' : True, 'halt-at-unknown' : True}))
        setVerbosity(normal)

    def testStructureArray(self):
        # default separates chains, only uses first model (129 atoms per chain)
        ss = structureArray("data/2jo4.pdb")
        self.assertTrue(len(ss) == 4)
        for s in ss:
            self.assertTrue(s.nAtoms() == 129)
        # include all models, separate chains, and include hydrogen and hetatm (286 atoms per chain)
        setVerbosity(nowarnings)
        ss = structureArray("data/2jo4.pdb",{'separate-models' : True,
                                             'hydrogen' : True,
                                             'hetatm' : True,
                                             'separate-chains' : True})
        self.assertTrue(len(ss) == 4*10)
        for s in ss:
            self.assertTrue(s.nAtoms() == 286)
        # include all models, and include hydrogen and hetatm (286 atoms per chain)
        ss = structureArray("data/2jo4.pdb",{'separate-models' : True,
                                             'hydrogen' : True,
                                             'hetatm' : True})
        self.assertTrue(len(ss) == 10)
        for s in ss:
            self.assertTrue(s.nAtoms() == 286*4)
        setVerbosity(normal)
        # check that the structures initialized this way can be used for calculations
        ss = structureArray("data/1ubq.pdb")
        self.assertTrue(len(ss) == 1)
        self.assertTrue(ss[0].nAtoms() == 602)
        result = calc(ss[0],Parameters({'algorithm' : ShrakeRupley}))
        self.assertTrue(math.fabs(result.totalArea() - 4834.716265) < 1e-5)
        # Test exceptions
        setVerbosity(silent)
        self.assertRaises(AssertionError,lambda: structureArray(None))
        self.assertRaises(IOError,lambda: structureArray(""))
        self.assertRaises(Exception,lambda: structureArray("data/err.config"))
        self.assertRaises(AssertionError,lambda: structureArray("data/2jo4.pdb",{'not-an-option' : True}))
        self.assertRaises(AssertionError,
                          lambda: structureArray("data/2jo4.pdb",
                                                 {'not-an-option' : True, 'hydrogen' : True}))
        self.assertRaises(AssertionError,
                          lambda: structureArray("data/2jo4.pdb",
                                                 {'hydrogen' : True}))
        setVerbosity(normal)

    def testCalc(self):
        # test default settings
        structure = Structure("data/1ubq.pdb")
        result = calc(structure,Parameters({'algorithm' : ShrakeRupley}))
        self.assertTrue(math.fabs(result.totalArea() - 4834.716265) < 1e-5)
        sasa_classes = classifyResults(result,structure)
        self.assertTrue(math.fabs(sasa_classes['Polar'] - 2515.821238) < 1e-5)
        self.assertTrue(math.fabs(sasa_classes['Apolar'] - 2318.895027) < 1e-5)
        # test L&R
        result = calc(structure,Parameters({'algorithm' : LeeRichards, 'n-slices' : 20}))
        sasa_classes = classifyResults(result,structure)
        self.assertTrue(math.fabs(result.totalArea() - 4804.055641) < 1e-5)
        self.assertTrue(math.fabs(sasa_classes['Polar'] - 2504.217302) < 1e-5)
        self.assertTrue(math.fabs(sasa_classes['Apolar'] - 2299.838339) < 1e-5)
        # test extending Classifier with derived class
        sasa_classes = classifyResults(result,structure,DerivedClassifier())
        self.assertTrue(math.fabs(sasa_classes['bla'] - 4804.055641) < 1e-5)
        ## test calculating with user-defined classifier ##
        classifier = Classifier("share/oons.config")
        # classifier passed to assign user-defined radii, could also have used setRadiiWithClassifier()
        structure = Structure("data/1ubq.pdb",classifier)
        result = calc(structure,Parameters({'algorithm' : ShrakeRupley}))
        self.assertTrue(math.fabs(result.totalArea() - 4779.5109924) < 1e-5)
        sasa_classes = classifyResults(result,structure,classifier) # classifier passed to get user-classes
        self.assertTrue(math.fabs(sasa_classes['Polar'] - 2236.9298941) < 1e-5)
        self.assertTrue(math.fabs(sasa_classes['Apolar'] - 2542.5810983) < 1e-5)

    def testCalcCoord(self):
        # one unit sphere
        radii = [1]
        coord = [0,0,0]
        parameters = Parameters()
        parameters.setNSlices(5000)
        parameters.setProbeRadius(0)
        result = calcCoord(coord, radii, parameters)
        self.assertTrue(math.fabs(result.totalArea() - 4*math.pi) < 1e-3)
        # two separate unit spheres
        radii = [1,1]
        coord = [0,0,0, 4,4,4]
        result = calcCoord(coord, radii, parameters)
        self.assertTrue(math.fabs(result.totalArea() - 2*4*math.pi) < 1e-3)
        # mismatched argument lengths raise
        self.assertRaises(AssertionError,
                          lambda: calcCoord(radii, radii))

    def testSelectArea(self):
        structure = Structure("data/1ubq.pdb")
        result = calc(structure,Parameters({'algorithm' : ShrakeRupley}))
        # will only test that this gets through to the C interface,
        # extensive checking of the parser is done in the C unit tests
        selections = selectArea(('s1, resn ala','s2, resi 1'),structure,result)
        self.assertTrue(math.fabs(selections['s1'] - 118.35) < 0.1)
        self.assertTrue(math.fabs(selections['s2'] - 50.77) < 0.1)

    def testBioPDB(self):
        # Skipped gracefully when Biopython is not installed.
        try:
            from Bio.PDB import PDBParser
        except ImportError:
            print("Can't import Bio.PDB, tests skipped")
            pass
        else:
            parser = PDBParser()
            bp_structure = parser.get_structure("Ubiquitin","data/1ubq.pdb")
            s1 = structureFromBioPDB(bp_structure)
            s2 = Structure("data/1ubq.pdb")
            self.assertTrue(s1.nAtoms() == s2.nAtoms())
            for i in range(0, s2.nAtoms()):
                self.assertTrue(s1.radius(i) == s2.radius(i))
                # there can be tiny errors here
                self.assertTrue(math.fabs(s1.coord(i)[0] - s2.coord(i)[0]) < 1e-5)
                self.assertTrue(math.fabs(s1.coord(i)[1] - s2.coord(i)[1]) < 1e-5)
                self.assertTrue(math.fabs(s1.coord(i)[2] - s2.coord(i)[2]) < 1e-5)
            # because Bio.PDB structures will have slightly different
            # coordinates (due to rounding errors) we set the
            # tolerance as high as 1e-3
            result = calc(s1, Parameters({'algorithm' : LeeRichards, 'n-slices' : 20}))
            print(result.totalArea())
            self.assertTrue(math.fabs(result.totalArea() - 4804.055641) < 1e-3)
            sasa_classes = classifyResults(result, s1)
            self.assertTrue(math.fabs(sasa_classes['Polar'] - 2504.217302) < 1e-3)
            self.assertTrue(math.fabs(sasa_classes['Apolar'] - 2299.838339) < 1e-3)
            result, sasa_classes = calcBioPDB(bp_structure, Parameters({'algorithm' : ShrakeRupley}))
            self.assertTrue(math.fabs(result.totalArea() - 4834.716265) < 1e-3)
            self.assertTrue(math.fabs(sasa_classes['Polar'] - 2515.821238) < 1e-3)
            self.assertTrue(math.fabs(sasa_classes['Apolar'] - 2318.895027) < 1e-3)
            print(result.totalArea())
if __name__ == '__main__':
    # make sure we're in the right directory (if script is called from
    # outside the directory), since the tests use relative fixture paths
    abspath = os.path.abspath(__file__)
    dirname = os.path.dirname(abspath)
    os.chdir(dirname)
    unittest.main()
| {
"repo_name": "JoaoRodrigues/freesasa",
"path": "bindings/python/test.py",
"copies": "1",
"size": "14379",
"license": "mit",
"hash": -8068843702292461000,
"line_mean": 43.7943925234,
"line_max": 124,
"alpha_frac": 0.6058140344,
"autogenerated": false,
"ratio": 3.510498046875,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4616312081275,
"avg_score": null,
"num_lines": null
} |
from FreesteelPython import *
import FreesteelWindow
from FreesteelWindow import inspect
from coreRough import *
import sys, time, locale
cstart = time.clock()
clast = cstart
def status(msg):
    # Print a progress line: seconds since script start, seconds since the
    # previous status() call, then the message.
    # (Python 2 print statement; note time.clock() was removed in Python 3.8.)
    global cstart, clast
    print locale.format("%f", time.clock()-cstart, True), locale.format("%f", time.clock()-clast, True), msg
    clast = time.clock()
# Load the STL named on the command line, or a default test part.
if len(sys.argv) == 2:
    stlname = sys.argv[1]
else:
    stlname = "upstands1.stl"
status("Loading STL \"" + stlname + "\"")
surface = GSTsurface()
surface.LoadSTL(stlname)
status("Creating boundary")
boundary = surface.MakeRectBoundary(0) # NB: a PathXSeries
status("Creating parameters")
params = makeParams(retractheight=surface.zrg.hi+5.0)
# Build zcount evenly spaced slicing levels spanning the surface's Z range.
zlevels = []
zcount = 3
zhi = surface.zrg.hi
zlo = surface.zrg.lo
zstep = (zhi-zlo) / zcount
z = zhi + zstep*.1 # offset by 1/10th of a step
for n in range(zcount):
    z -= zstep
    zlevels.append(z)
print "zlevels =", zlevels
paths = []
blpaths = []
status("Initializing CoreRougher")
cr = CoreRougher(surface, boundary, params)
# Generate one roughing toolpath (and its boundary contours) per Z level.
for z in zlevels:
    status("Generating toolpath at Z%.2f" % z)
    path = PathXSeries()
    blpath = PathXSeries()
    cr.generateAt(z, path, blpath)
    paths.append(path)
    blpaths.append(blpath)
tool = ToolShape(params.toolflatrad, params.toolcornerrad, params.toolcornerrad, params.toolcornerrad / 10.0)
status("Done!")
inspect(surface, paths, boundary, tool)
| {
"repo_name": "JohnyEngine/CNC",
"path": "deprecated/libactp/freesteel/test/test-coreRough.py",
"copies": "2",
"size": "1371",
"license": "apache-2.0",
"hash": -3207159225878336500,
"line_mean": 23.4821428571,
"line_max": 109,
"alpha_frac": 0.7206418673,
"autogenerated": false,
"ratio": 2.742,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9390936460653558,
"avg_score": 0.014341081329288474,
"num_lines": 56
} |
from FreesteelPython import *
import FreesteelWindow
import sys
import string
class pp:
    """Minimal modal post-processor: converts a PathXSeries toolpath into a
    simple G-code-like text program, one "L" move per line.

    Output is modal: an axis word (X/Y/Z) is only emitted when its formatted
    value differs from the previously written one.
    """

    def __init__(self):
        # Last written axis words; empty strings force the first move to
        # emit all three axes.
        self.lx0 = ""
        self.ly0 = ""
        self.lz0 = ""

    def writeheading(self, fout):
        """Write the program prologue (program start and tool load)."""
        fout.write("BEGINPGM\n")
        fout.write("LOADTL1\n")

    def writeline(self, fout, x, y, z):
        """Write one linear move, omitting axis words unchanged at 3-decimal
        resolution; writes nothing at all if every axis repeats."""
        res = ['L']
        sx = "X%.3f" % x
        if sx != self.lx0:
            res.append(sx)
            self.lx0 = sx
        sy = "Y%.3f" % y
        if sy != self.ly0:
            res.append(sy)
            self.ly0 = sy
        sz = "Z%.3f" % z
        if sz != self.lz0:
            res.append(sz)
            self.lz0 = sz
        if len(res) > 1:
            res.append('\n')
            # ''.join works identically on Python 2 and 3 (was string.join).
            fout.write(''.join(res))

    def writePath(self, fout, pathx):
        """Walk the points of pathx, emitting cutting moves, link
        (retract/reposition) polylines at each break, and a path restart
        after every link.

        pathx supplies GetNpts/GetX/GetY, break indices (GetNbrks /
        GetBrkIndex) partitioning the points, and per-break link points.
        """
        j = 0
        restart = 1
        for i in range(pathx.GetNpts()):
            if (j == pathx.GetNbrks()) or (i < pathx.GetBrkIndex(j)):
                if restart == 1:
                    fout.write("//////// Begin new path ///////\n")
                    # Re-enter the path at the preceding point (the one the
                    # break skipped).  NOTE(review): when i == 0 this wraps
                    # to the final point -- confirm that is intended.
                    self.writeline(fout, pathx.GetX(i - 1), pathx.GetY(i - 1), pathx.z)
                    restart = 0
                self.writeline(fout, pathx.GetX(i), pathx.GetY(i), pathx.z)
            else:
                while (j < pathx.GetNbrks()) and (i == pathx.GetBrkIndex(j)):
                    if pathx.GetNlnks(j) > 0:
                        # Bug fix: was "restart == 1" (a no-op comparison),
                        # so the path was never restarted after a link.
                        restart = 1
                        fout.write("//////// Begin new link ///////\n")
                        for il in range(pathx.GetNlnks(j)):
                            self.writeline(fout, pathx.GetLinkX(j, il), pathx.GetLinkY(j, il), pathx.GetLinkZ(j, il))
                    j = j + 1
def postprocess(pathx, fname):
    """Post-process toolpath pathx into a program file named fname."""
    lpp = pp()
    # Context manager guarantees the file is closed even if writing fails
    # (the original used a bare open()/close() pair).
    with open(fname, "w") as fout:
        lpp.writeheading(fout)
        fout.write("G0\n")
        lpp.writePath(fout, pathx)
        fout.write("ENDPGM\n")
| {
"repo_name": "JohnyEngine/CNC",
"path": "deprecated/libactp/freesteel/test/postprocessor.py",
"copies": "2",
"size": "1601",
"license": "apache-2.0",
"hash": 3520092539490090000,
"line_mean": 21.5492957746,
"line_max": 95,
"alpha_frac": 0.5858838226,
"autogenerated": false,
"ratio": 2.3753709198813056,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.39612547424813055,
"avg_score": null,
"num_lines": null
} |
from FreesteelPython import *
import FreesteelWindow
import sys
import postprocessor
#from postprocessor import postprocess
from coreRough import coreRough
def makeRectBoundary(sx):
    """Build a closed rectangular boundary path around the XY extent of
    surface sx, positioned 5 units above its top Z."""
    lo_x, hi_x = sx.gxrg.lo, sx.gxrg.hi
    lo_y, hi_y = sx.gyrg.lo, sx.gyrg.hi
    bdy = PathXSeries()
    # Trace the four corners in order and close back on the start corner.
    for cx, cy in ((lo_x, lo_y), (hi_x, lo_y), (hi_x, hi_y), (lo_x, hi_y), (lo_x, lo_y)):
        bdy.Add(P2(cx, cy))
    bdy.z = sx.gzrg.hi + 5
    bdy.Break()
    return bdy
def makeParams(stepdown=15.0, toolcornerrad=3.0, toolflatrad=0.0, retractheight=50.0):
    """Assemble a MachineParams with the project's standard machining
    settings; only the most commonly varied values are parameters."""
    params = MachineParams()
    settings = {
        # linking parameters
        'leadoffdz': 0.1,
        'leadofflen': 1.1,
        'leadoffrad': 2.0,
        'retractzheight': retractheight,
        'leadoffsamplestep': 0.6,
        # cutting parameters
        'toolcornerrad': toolcornerrad,
        'toolflatrad': toolflatrad,
        'samplestep': 0.4,
        'stepdown': stepdown,
        'clearcuspheight': stepdown / 3.0,
        # weave resolutions
        'triangleweaveres': 0.51,
        'flatradweaveres': 0.71,
        # steering: fixed values controlling the step-forward of the tool
        # and changes of direction.
        'dchangright': 0.17,
        'dchangrightoncontour': 0.37,
        'dchangleft': -0.41,
        'dchangefreespace': -0.6,
        'sidecutdisplch': 0.0,
        # feed rates and thinning tolerance
        'fcut': 1000,
        'fretract': 5000,
        'thintol': 0.0001,
    }
    for attr, value in settings.items():
        setattr(params, attr, value)
    return params
# Load the demo part, show it, and generate two roughing levels.
mainframe = FreesteelWindow.MainFrame()
vtkwindow = mainframe.vtkwindow
surfhandle = vtkwindow.LoadSTL("mm.stl")
vtkwindow.showAll() # zoom to fit
surfx = SurfX()
vtkwindow.PushTrianglesIntoSurface(surfhandle, surfx)
surfx.BuildComponents()
boundary = makeRectBoundary(surfx)
vtkwindow.addPathxSeries(boundary)
vtkwindow.render()
params = makeParams()
# NOTE(review): makeParams sets "retractzheight"; this assigns a
# differently named "retractheight" attribute -- confirm which name the
# machining code actually reads.
params.retractheight = surfx.gzrg.hi + 2
z = surfx.gzrg.lo + 2
pathx = PathXSeries()
coreRough(pathx, surfx, boundary, params, z)
vtkwindow.addPathxSeries(pathx)
# NOTE(review): pathx2 aliases pathx (no copy is made), so the second
# coreRough call generates into the same series -- confirm intent.
pathx2 = pathx
z = surfx.gzrg.lo + 20
coreRough(pathx2, surfx, boundary, params, z)
vtkwindow.addPathxSeries(pathx2)
vtkwindow.render()
# send to post processor
postprocessor.postprocess(pathx, "ghgh.tap")
if ('interactive' not in sys.argv):
    # Running mainloop. Run interactively (-i) with argument 'interactive' to access the interactive console.
    mainframe.mainloop()
| {
"repo_name": "JohnyEngine/CNC",
"path": "deprecated/libactp/freesteel/test/garston.py",
"copies": "2",
"size": "2258",
"license": "apache-2.0",
"hash": 7275262958853369000,
"line_mean": 22.5208333333,
"line_max": 106,
"alpha_frac": 0.7555358725,
"autogenerated": false,
"ratio": 2.625581395348837,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9032403633075448,
"avg_score": 0.06974272695467788,
"num_lines": 96
} |
from FreesteelPython import *
"""
Issues:
Crashes when toolflatrad != 0.0
"""
def makeParams(stepdown=15.0, toolcornerrad=3.0, toolflatrad=0.0, retractheight=50.0):
    """Build a MachineParams populated with the standard machining settings;
    frequently tuned values are exposed as keyword arguments."""
    params = MachineParams()

    # --- linking parameters ---
    params.retractzheight = retractheight
    params.leadoffdz, params.leadofflen, params.leadoffrad = 0.1, 1.1, 2.0
    params.leadoffsamplestep = 0.6

    # --- cutting parameters ---
    params.toolcornerrad = toolcornerrad
    params.toolflatrad = toolflatrad
    params.samplestep = 0.4
    params.stepdown = stepdown
    params.clearcuspheight = stepdown / 3.0

    # --- weave resolutions ---
    params.triangleweaveres, params.flatradweaveres = 0.51, 0.71

    # --- steering: fixed values controlling the step-forward of the tool
    # and changes of direction ---
    params.dchangright = 0.17
    params.dchangrightoncontour = 0.37
    params.dchangleft = -0.41
    params.dchangefreespace = -0.6
    params.sidecutdisplch = 0.0

    # --- feed rates and thinning tolerance ---
    params.fcut, params.fretract = 1000, 5000
    params.thintol = 0.0001
    return params
def coreRough(res, sx, bound, params, z, blpaths=None):
    """Generate one core-roughing level at height z into res.

    res     -- PathXSeries receiving the generated toolpath
    sx      -- SurfX triangle surface
    bound   -- PathXSeries stock boundary
    params  -- MachineParams machining settings
    z       -- slice height
    blpaths -- optional PathXSeries receiving the boundary contours;
               a fresh one is created when omitted.
    """
    # Bug fix: the default used to be "blpaths=PathXSeries()", a mutable
    # default evaluated once at import and shared (and mutated) across calls.
    if blpaths is None:
        blpaths = PathXSeries()
    # boxed surfaces
    sxb = SurfXboxed(sx)
    sxb.BuildBoxes(10.0)
    # interior close to tool, or absolute intersections with triangle faces
    areaoversize = (params.toolcornerrad + params.toolflatrad) * 2 + 13
    # these can be reused
    a2g = Area2_gen()
    a2g.SetShape(sx.gxrg.Inflate(areaoversize), sx.gyrg.Inflate(areaoversize), params.triangleweaveres)
    a2g.SetSurfaceTop(sxb, params.toolcornerrad)
    a2gfl = Area2_gen()
    a2gfl.SetShape(sx.gxrg.Inflate(areaoversize), sx.gyrg.Inflate(areaoversize), params.flatradweaveres)
    # make the core roughing algorithm thing
    crg = CoreRoughGeneration(res, sx.gxrg.Inflate(10), sx.gyrg.Inflate(10))
    # the stock boundary
    crg.tsbound.Append(bound.pths)
    # the material boundary weave used in the core roughing.
    if params.toolflatrad != 0.0:
        crg.pa2gg = a2gfl
    else:
        crg.pa2gg = a2g
    crg.trad = params.toolcornerrad * 0.9 + params.toolflatrad  # the clearing radius
    crg.setWeave(crg.pa2gg)
    # hack against the surfaces
    a2g.HackDowntoZ(z)
    a2g.MakeContours(blpaths)
    # hack by the flatrad.
    if (params.toolflatrad != 0.0):
        HackAreaOffset(a2gfl, blpaths, params.toolflatrad)
        a2gfl.z = a2g.z
        # make it again so we can see
        # NOTE(review): rebinding blpaths here discards the caller-visible
        # series; the regenerated contours stay local -- confirm intent.
        blpaths = PathXSeries()
        a2gfl.MakeContours(blpaths)
    crg.GrabberAlg(params)
class CoreRougher:
    """Reusable core-roughing generator: builds the boxed surface and the
    offset-area weaves once in __init__, then generateAt() slices one Z
    level at a time into a caller-supplied PathXSeries."""

    def __init__(self, surface, boundary, params):
        self.surface = surface
        self.params = params
        self.boundary = boundary
        # define the empty surface
        self.sx = SurfX(surface.xrg.Inflate(2), surface.yrg.Inflate(2), surface.zrg)
        surface.PushTrianglesIntoSurface(self.sx)
        self.sx.BuildComponents() # compress thing
        # boxed surfaces
        self.sxb = SurfXboxed(self.sx);
        self.sxb.BuildBoxes(10.0);
        # interior close to tool, or absolute intersections with triangle faces
        areaoversize = (self.params.toolcornerrad + self.params.toolflatrad) * 2 + 13;
        # weaves for non-flat/flat part of tool
        self.a2g = Area2_gen()
        self.a2g.SetShape(self.sx.gxrg.Inflate(areaoversize), self.sx.gyrg.Inflate(areaoversize), self.params.triangleweaveres);
        self.a2g.SetSurfaceTop(self.sxb, params.toolcornerrad);
        # The flat-radius weave is only needed for tools with a flat tip.
        if (self.params.toolflatrad != 0.0):
            self.a2gfl = Area2_gen()
            self.a2gfl.SetShape(self.sx.gxrg.Inflate(areaoversize), self.sx.gyrg.Inflate(areaoversize), self.params.flatradweaveres);

    def generateAt(self, z, path, blpaths):
        # Generate the roughing toolpath for height z into path; boundary
        # contours go into blpaths.
        # make the core roughing algorithm thing
        crg = CoreRoughGeneration(path, self.sx.gxrg.Inflate(10), self.sx.gyrg.Inflate(10));
        # the stock boundary
        crg.tsbound.Append(self.boundary.pths);
        # the material boundary weave used in the core roughing.
        if (self.params.toolflatrad != 0.0):
            crg.pa2gg = self.a2gfl
        else:
            crg.pa2gg = self.a2g
        crg.trad = self.params.toolcornerrad * 0.9 + self.params.toolflatrad; # the clearing radius
        crg.setWeave(crg.pa2gg);
        # hack against the surfaces
        self.a2g.HackDowntoZ(z);
        self.a2g.MakeContours(blpaths);
        # hack by the flatrad.
        if (self.params.toolflatrad != 0.0):
            HackAreaOffset(self.a2gfl, blpaths, self.params.toolflatrad);
            self.a2gfl.z = self.a2g.z;
            # make it again so we can see
            # NOTE(review): this rebinds the local name only -- the caller's
            # blpaths never receives the regenerated contours; confirm intent.
            blpaths = PathXSeries();
            self.a2gfl.MakeContours(blpaths);
        # generate toolpath
        crg.GrabberAlg(self.params);
| {
"repo_name": "JohnyEngine/CNC",
"path": "deprecated/libactp/freesteel/test/coreRough.py",
"copies": "2",
"size": "4325",
"license": "apache-2.0",
"hash": -1690465699692290300,
"line_mean": 29.2447552448,
"line_max": 124,
"alpha_frac": 0.7255491329,
"autogenerated": false,
"ratio": 2.5975975975975976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43231467304975973,
"avg_score": null,
"num_lines": null
} |
from freetype import Face
import numpy as np
from pylab import axis, close, figure, plot, show
def unpackCharacter(glyph):
    """Decompose a FreeType glyph outline into loops of drawable segments.

    Returns a list of loops (one per outline contour); each loop is a list
    of segments, where a 2-point segment is a straight line and a 3-point
    segment is a quadratic Bezier (start, control, end).
    """
    outline = glyph.outline
    # (Removed a dead "points = np.array(...)" assignment that was
    # unconditionally shadowed inside the loop below before any use.)
    # Find the start and stop points for each loop in the segment list
    starts = [0] + [s + 1 for s in outline.contours[:-1]]
    ends = list(outline.contours)
    loopSets = []
    # Iterate over the loops
    for start, end in zip(starts, ends):
        points = outline.points[start: end + 1]
        tags = outline.tags[start:end + 1]
        # Close the loop by repeating its first point and tag.
        points.append(points[0])
        tags.append(tags[0])
        segments = [[]]
        for n, (pt, tg) in enumerate(zip(points, tags)):
            segments[-1].append(pt)
            if n == 0:
                continue
            # Bit 0 of a FreeType tag marks an on-curve point, which ends
            # the current segment and starts the next one.
            newSegmentFlag = (tg & 1)
            notTheEndPoint = n < (len(points) - 1)
            if newSegmentFlag and notTheEndPoint:
                segments.append([pt, ])
        mySegmentList = []
        for segment in segments:
            if len(segment) in [2, 3]:
                mySegmentList.append(segment)
            else:
                # Longer runs of off-curve points pack several quadratic
                # Beziers; expand them into explicit 3-point segments.
                mySegmentList.extend(breakPackedSplineIntoBezier(segment))
        loopSets.append(mySegmentList)
    return loopSets
def breakPackedSplineIntoBezier(segmentData):
    """Split a run of packed quadratic-spline points into individual
    quadratic Bezier segments (start, control, end).

    Consecutive off-curve control points imply an on-curve point at their
    midpoint; this reconstructs those implied points.  The first segment
    keeps its true start point and the last keeps its true end point.
    """
    def midpoint(p, q):
        return ((p[0] + q[0]) / 2.0, (p[1] + q[1]) / 2.0)

    beziers = []
    for idx in range(len(segmentData)):
        head, ctrl, tail = segmentData[idx:idx + 3]
        before = midpoint(head, ctrl)
        after = midpoint(ctrl, tail)
        if idx == 0:
            # Opening segment: keep the real start point.
            beziers.append((head, ctrl, after))
        elif idx == len(segmentData) - 3:
            # Closing segment: keep the real end point and stop.
            beziers.append((before, ctrl, tail))
            break
        else:
            # Interior segment: both ends are implied midpoints.
            beziers.append((before, ctrl, after))
    return beziers
def computeQuadBezier(pt1, pt2, pt3, nPoints=4):
    """Sample a quadratic Bezier defined by (pt1, pt2, pt3) at nPoints
    evenly spaced parameter values; returns the (xs, ys) sample arrays."""
    t = np.linspace(0, 1, nPoints)
    u = 1 - t
    # Bernstein basis weights for a degree-2 curve.
    w0, w1, w2 = u ** 2, 2 * u * t, t ** 2
    xs = w0 * pt1[0] + w1 * pt2[0] + w2 * pt3[0]
    ys = w0 * pt1[1] + w1 * pt2[1] + w2 * pt3[1]
    return xs, ys
def plotQuadBezier(pt1, pt2, pt3, nPoints=10):
    """Plot a sampled quadratic Bezier as a thick translucent green line."""
    xs, ys = computeQuadBezier(pt1, pt2, pt3, nPoints)
    plot(xs, ys, "g", linewidth=5, alpha=0.3)
def plotLoopSets(loopSets):
    """Plot every segment of every loop: straight lines in translucent
    blue; Beziers as a dotted red control polygon with the curve overlaid."""
    for loop in loopSets:
        for seg in loop:
            if len(seg) == 2:
                # Straight line segment between two points.
                plot([seg[0][0], seg[1][0]], [seg[0][1], seg[1][1]], 'b', linewidth=5, alpha=0.3)
            if len(seg) == 3:
                # Quadratic Bezier: control polygon first, then the curve.
                A, B, C = seg
                plot([A[0], B[0], C[0]], [A[1], B[1], C[1]], 'r:', linewidth=2)
                plotQuadBezier(*tuple(seg))
def loopToPolygon(loop, bezN=10):
    """Flatten one glyph loop into a closed list of (x, y) points,
    sampling each Bezier segment into bezN points.

    Bug fix: the Bezier branch previously did
    "pts.append(x, y) for x, y in computeQuadBezier(...)", which both
    calls list.append with two arguments (TypeError) and iterates the
    (xs, ys) array pair instead of the point pairs; zip fixes both.
    """
    pts = []
    for segment in loop:
        if len(segment) == 2:
            # Straight line: keep both endpoints.
            pts.extend(segment)
        if len(segment) == 3:
            # Quadratic Bezier: sample it into bezN (x, y) points.
            xs, ys = computeQuadBezier(*tuple(segment), nPoints=bezN)
            pts.extend((x, y) for x, y in zip(xs, ys))
    # The loop must be closed (first point equals last).
    assert pts[0] == pts[-1]
    return pts
def loopsToPolygons(loopSets):
    """Convert each loop in loopSets to a flattened polygon point list."""
    polygons = []
    for loop in loopSets:
        polygons.append(loopToPolygon(loop))
    return polygons
def shiftLoopSet(loopSets, xS, yS):
    """Return a copy of loopSets with every point translated by (xS, yS);
    the nested loop/segment structure is preserved."""
    return [
        [[(px + xS, py + yS) for px, py in seg] for seg in loop]
        for loop in loopSets
    ]
def plotTextString(stringToPlot, kerning=False, startXY=(0,0)):
    """Render stringToPlot as matplotlib line/Bezier plots, one glyph at a
    time, advancing the pen position by each glyph's advance (and kerning,
    if enabled).  Positions are in FreeType's raw units.

    NOTE(review): the font paths are hard-coded developer-local files.
    """
    fontPath = "/home/meawoppl/Dropbox/repos/babyfood/cmr10.pfb"
    typeFace = Face(fontPath)
    typeFace.attach_file("/home/meawoppl/Dropbox/repos/babyfood/cmr10.pfm")
    typeFace.set_char_size(48 * 64)
    figure()
    startX, startY = startXY
    for n, char in enumerate(stringToPlot):
        typeFace.load_char(char)
        loopz = unpackCharacter(typeFace.glyph)
        loopz = shiftLoopSet(loopz, startX, startY)
        startX += typeFace.glyph.advance.x
        startY += typeFace.glyph.advance.y
        if kerning and (n != 0):
            # NOTE(review): arguments are (current, previous); FreeType
            # kerning is usually queried as (left, right) -- confirm the
            # order isn't swapped here.
            kv = typeFace.get_kerning(char, stringToPlot[n-1])
            # Debug output left in by the author.
            print(char, stringToPlot[n-1])
            print(kv.x, kv.y)
            print(dir(kv))
            startX += kv.x
        plotLoopSets(loopz)
    axis("equal")
    show()
    close()
class TextFeature(SingleLayerFeature):
    """Board feature placing a text string on a single layer.

    NOTE(review): SingleLayerFeature is not imported in this module's
    visible imports, and artist() is an unfinished stub.
    """
    def __init__(self, layerName, text):
        SingleLayerFeature.__init__(self, layerName)
        self.text = text

    def artist(self, gerberWriter):
        # Stub: evaluates gerberWriter and discards it; no drawing yet.
        gerberWriter
if __name__ == "__main__":
    # Demo: render a pangram with kerning enabled.
    plotTextString("The quick brown fox jumped over the lazy dogs.", kerning = True)
| {
"repo_name": "meawoppl/babyfood",
"path": "babyfood/features/FontsExtractor.py",
"copies": "1",
"size": "4539",
"license": "bsd-2-clause",
"hash": 271112771825512130,
"line_mean": 29.26,
"line_max": 97,
"alpha_frac": 0.558713373,
"autogenerated": false,
"ratio": 3.2032462949894143,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9254117271413134,
"avg_score": 0.0015684793152562072,
"num_lines": 150
} |
from freezegame.sprite import Sprite
class Particle(Sprite):
    """Short-lived cosmetic sprite with velocity/acceleration/friction
    physics, optional rotation, and optional fade-in/fade-out over its
    lifetime.  Marks itself dead when life_time runs out."""

    def __init__(self, pos, state, image_name, image_region, batch, group, life_time=1.0, vel=None,
                 acc=None, fric=0.0, rot=0.0, rot_vel=0.0, rot_acc=0.0, rot_fric=0.0, fade=False, fade_in=False):
        # Bug fix: vel and acc previously defaulted to shared mutable lists
        # ([0.0, 0.0]); update() mutates self.vel in place, so every particle
        # created with the defaults ended up sharing one velocity list.
        if vel is None:
            vel = [0.0, 0.0]
        if acc is None:
            acc = [0.0, 0.0]
        # NOTE(review): pos is stored and mutated in place, so the caller's
        # list is updated too -- confirm callers rely on (or tolerate) this.
        self.pos = pos
        self.vel = vel
        self.acc = acc
        self.fric = fric
        self.rot = rot
        self.rot_vel = rot_vel
        self.rot_acc = rot_acc
        self.rot_fric = rot_fric
        self.life_time = life_time        # seconds remaining
        self.total_life = self.life_time  # original lifespan, for fade maths
        self.dead = False
        self.keep = False
        self.fade = fade
        self.fadeIn = fade_in
        self.max_v = state.maxVX          # speed clamp per axis, or None
        Sprite.__init__(self, pos[0], pos[1], [0, 0, 32, 32], state, image_name, image_region, batch, group)
        # Particles never collide with anything.
        self.physicalToSprites = False
        self.physicalToWalls = False
        # Centre the draw/rotation anchor on the image.
        self.sprite.image.anchor_x = image_region[2] / 2  # (self.sprite.width * self.absolute_scale)/2
        self.sprite.image.anchor_y = image_region[3] / 2  # (self.sprite.height * self.absolute_scale)/2

    def update(self, dt, keys, state):
        """Advance the particle by dt seconds."""
        self.life_time -= dt
        if self.life_time <= 0:
            self.dead = True
            return
        # Integrate acceleration, then apply friction as a decay towards
        # zero that is clamped so it never overshoots past zero.
        self.vel[0] += self.acc[0] * dt
        self.vel[1] += self.acc[1] * dt
        if self.vel[0] > 0:
            self.vel[0] -= self.fric * dt
            if self.vel[0] < 0:
                self.vel[0] = 0
        if self.vel[0] < 0:
            self.vel[0] += self.fric * dt
            if self.vel[0] > 0:
                self.vel[0] = 0
        if self.vel[1] > 0:
            self.vel[1] -= self.fric * dt
            if self.vel[1] < 0:
                self.vel[1] = 0
        if self.vel[1] < 0:
            self.vel[1] += self.fric * dt
            if self.vel[1] > 0:
                self.vel[1] = 0
        # Clamp each velocity component to +/- max_v when a limit is set.
        if self.max_v is not None:
            if self.vel[0] > self.max_v:
                self.vel[0] = self.max_v
            if self.vel[0] < -self.max_v:
                self.vel[0] = -self.max_v
            if self.vel[1] > self.max_v:
                self.vel[1] = self.max_v
            if self.vel[1] < -self.max_v:
                self.vel[1] = -self.max_v
        self.pos[0] += self.vel[0] * dt
        self.pos[1] += self.vel[1] * dt
        # Same integrate-then-decay scheme for rotation.
        self.rot += self.rot_vel * dt
        self.rot_vel += self.rot_acc * dt
        if self.rot_vel > 0:
            self.rot_vel -= self.rot_fric * dt
            if self.rot_vel < 0:
                self.rot_vel = 0
        if self.rot_vel < 0:
            self.rot_vel += self.rot_fric * dt
            if self.rot_vel > 0:
                self.rot_vel = 0
        # Fade-in: during the first half of life, opacity follows
        # 255 * (1 - life_time/total_life).
        if self.fadeIn:
            if self.life_time > self.total_life / 2.0:
                self.sprite.opacity = int(255.0 * (1.0 - float(self.life_time / 2.0) / (float(self.total_life) / 2.0)))
        # Fade-out: during the last half of life, opacity follows
        # 255 * (life_time/total_life).
        if self.fade:
            if self.life_time < self.total_life / 2.0:
                self.sprite.opacity = int(255.0 * float(self.life_time / 2.0) / (float(self.total_life) / 2.0))
        self.sprite.rotation = self.rot
        self.x = self.pos[0]
        self.y = self.pos[1]
        self.sprite.x = self.x
        self.sprite.y = self.y
        # PybaconSprite.update(self, dt, keys, state)
| {
"repo_name": "mattfister/freezegame",
"path": "freezegame/particle.py",
"copies": "1",
"size": "3311",
"license": "mit",
"hash": -4635647073956852000,
"line_mean": 30.5333333333,
"line_max": 119,
"alpha_frac": 0.4919963757,
"autogenerated": false,
"ratio": 3.00453720508167,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.399653358078167,
"avg_score": null,
"num_lines": null
} |
from freeze_globals import freeze_globals
@freeze_globals
def too_many_constants():
    """Rebind x to 118 distinct integer literals and return the last one.

    The repetition is deliberate, not an oversight: each distinct literal
    occupies a slot in the code object's constant table, presumably to
    stress freeze_globals' bytecode rewriting once the table grows large
    (as the file name suggests) -- the values themselves are meaningless.
    """
    x = 0
    x = 1
    x = 2
    x = 3
    x = 4
    x = 5
    x = 6
    x = 7
    x = 8
    x = 9
    x = 10
    x = 11
    x = 12
    x = 13
    x = 14
    x = 15
    x = 16
    x = 17
    x = 18
    x = 19
    x = 20
    x = 21
    x = 22
    x = 23
    x = 24
    x = 25
    x = 26
    x = 27
    x = 28
    x = 29
    x = 30
    x = 31
    x = 32
    x = 33
    x = 34
    x = 35
    x = 36
    x = 37
    x = 38
    x = 39
    x = 40
    x = 41
    x = 42
    x = 43
    x = 44
    x = 45
    x = 46
    x = 47
    x = 48
    x = 49
    x = 50
    x = 51
    x = 52
    x = 53
    x = 54
    x = 55
    x = 56
    x = 57
    x = 58
    x = 59
    x = 60
    x = 61
    x = 62
    x = 63
    x = 64
    x = 65
    x = 66
    x = 67
    x = 68
    x = 69
    x = 70
    x = 71
    x = 72
    x = 73
    x = 74
    x = 75
    x = 76
    x = 77
    x = 78
    x = 79
    x = 80
    x = 81
    x = 82
    x = 83
    x = 84
    x = 85
    x = 86
    x = 87
    x = 88
    x = 89
    x = 90
    x = 91
    x = 92
    x = 93
    x = 94
    x = 95
    x = 96
    x = 97
    x = 98
    x = 99
    x = 100
    x = 101
    x = 102
    x = 103
    x = 104
    x = 105
    x = 106
    x = 107
    x = 108
    x = 109
    x = 110
    x = 111
    x = 112
    x = 113
    x = 114
    x = 115
    x = 116
    x = 117
    return x
if __name__ == '__main__':
    # Run the snippet directly to exercise the frozen function.
    too_many_constants()
| {
"repo_name": "ssanderson/pycon-2016",
"path": "snippets/freeze_too_many_constants.py",
"copies": "1",
"size": "1459",
"license": "cc0-1.0",
"hash": 8096665092095175000,
"line_mean": 10.3984375,
"line_max": 41,
"alpha_frac": 0.3173406443,
"autogenerated": false,
"ratio": 2.833009708737864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8650350353037863,
"avg_score": 0,
"num_lines": 128
} |
from freezegun import freeze_time
from django.test import TestCase, override_settings
from django.contrib.gis.geos import Point
from django.core.exceptions import ValidationError
from accounts.models import User
from busstops.models import DataSource, Region, Operator, Service
from .models import (Vehicle, VehicleType, VehicleFeature, Livery,
VehicleJourney, VehicleLocation, VehicleEdit)
@override_settings(CELERY_BROKER_URL='redis://localhost:69')
class VehiclesTests(TestCase):
@classmethod
def setUpTestData(cls):
    """Create the fixture shared by every test: two operators under one
    parent group, two vehicles (one with ad-hoc colours, one with a
    Livery), a tracked service, and a journey with a current location."""
    cls.datetime = '2018-12-25 19:47+00:00'
    source = DataSource.objects.create(name='HP', datetime=cls.datetime)
    ea = Region.objects.create(id='EA', name='East Anglia')
    cls.wifi = VehicleFeature.objects.create(name='Wi-Fi')
    cls.usb = VehicleFeature.objects.create(name='USB')
    cls.bova = Operator.objects.create(region=ea, name='Bova and Over', id='BOVA', slug='bova-and-over',
                                       parent='Madrigal Electromotive')
    cls.lynx = Operator.objects.create(region=ea, name='Lynx', id='LYNX', slug='lynx',
                                       parent='Madrigal Electromotive')
    tempo = VehicleType.objects.create(name='Optare Tempo', coach=False, double_decker=False)
    spectra = VehicleType.objects.create(name='Optare Spectra', coach=False, double_decker=True)
    service = Service.objects.create(service_code='49', region=ea, date='2018-12-25', tracking=True,
                                     description='Spixworth - Hunworth - Happisburgh')
    service.operator.add(cls.lynx)
    service.operator.add(cls.bova)
    cls.vehicle_1 = Vehicle.objects.create(code='2', fleet_number=1, reg='FD54JYA', vehicle_type=tempo,
                                           colours='#FF0000', notes='Trent Barton', operator=cls.lynx,
                                           data={'Depot': 'Holt'})
    livery = Livery.objects.create(colours='#FF0000 #0000FF')
    cls.vehicle_2 = Vehicle.objects.create(code='50', fleet_number=50, reg='UWW2X', livery=livery,
                                           vehicle_type=spectra, operator=cls.lynx, data={'Depot': 'Long Sutton'})
    cls.journey = VehicleJourney.objects.create(vehicle=cls.vehicle_1, datetime=cls.datetime, source=source,
                                                service=service, route_name='2')
    cls.location = VehicleLocation.objects.create(datetime=cls.datetime, latlong=Point(0, 51),
                                                  journey=cls.journey, current=True)
    cls.vehicle_1.latest_location = cls.location
    cls.vehicle_1.save()
    cls.vehicle_1.features.set([cls.wifi])
    # Staff superuser used by the edit-view tests.
    cls.user = User.objects.create(username='josh', is_staff=True, is_superuser=True)
def test_parent(self):
    """The group page lists operators under the shared parent name."""
    response = self.client.get('/groups/Madrigal Electromotive/vehicles')
    self.assertContains(response, 'Lynx')
    self.assertContains(response, 'Madrigal Electromotive')
    self.assertContains(response, 'Optare')
def test_vehicle(self):
    """Exercise Vehicle model helpers without hitting any views:
    reg formatting in __str__, Flickr search URL/link building,
    editability, and fleet-number-vs-code mismatch detection."""
    vehicle = Vehicle(reg='3990ME')
    # Registrations are displayed with a non-breaking space.
    self.assertEqual(str(vehicle), '3990\xa0ME')
    self.assertIn('search/?text=3990ME%20or%20%223990%20ME%22&sort', vehicle.get_flickr_url())
    vehicle.reg = 'J122018'
    self.assertEqual(str(vehicle), 'J122018')
    self.assertTrue(vehicle.editable())
    vehicle.notes = 'Spare ticket machine'
    self.assertEqual('', vehicle.get_flickr_link())
    self.assertFalse(vehicle.editable())
    vehicle = Vehicle(code='RML2604')
    self.assertIn('search/?text=RML2604&sort', vehicle.get_flickr_url())
    vehicle.operator = Operator(name='Lynx')
    self.assertIn('search/?text=Lynx%20RML2604&sort', vehicle.get_flickr_url())
    vehicle.fleet_number = '11111'
    self.assertIn('search/?text=Lynx%2011111&sort', vehicle.get_flickr_url())
    vehicle.reg = 'YN69GHA'
    vehicle.operator.parent = 'Stagecoach'
    vehicle.fleet_number = '11111'
    self.assertIn('search/?text=YN69GHA%20or%20%22YN69%20GHA%22%20or%20Stagecoach%2011111&sort',
                  vehicle.get_flickr_url())
    vehicle.code = 'YN_69_GHA'
    self.assertFalse(vehicle.fleet_number_mismatch())
    vehicle.code = 'YN19GHA'
    self.assertTrue(vehicle.fleet_number_mismatch())
def test_fleet_lists(self):
    """Operator fleet pages: 404 for an operator with no vehicles, and
    last-seen timestamps shown time-only on the same day, date+time
    otherwise."""
    with self.assertNumQueries(2):
        response = self.client.get('/operators/bova-and-over/vehicles')
    self.assertEqual(404, response.status_code)
    self.assertFalse(str(response.context['exception']))
    # last seen today - should only show time
    with freeze_time('2018-12-25 19:50+00:00'):
        with self.assertNumQueries(4):
            response = self.client.get('/operators/lynx/vehicles')
        self.assertNotContains(response, '25 Dec')
        self.assertContains(response, '19:47')
    # last seen on a previous day - should show both date and time
    with freeze_time('2018-12-26 12:00+00:00'):
        with self.assertNumQueries(4):
            response = self.client.get('/operators/lynx/vehicles')
        self.assertContains(response, '25 Dec 19:47')
    self.assertTrue(response.context['code_column'])
    self.assertContains(response, '<td class="number">2</td>')
def test_vehicle_views(self):
    """Vehicle detail pages (including an invalid ?date= value) and the
    journey JSON endpoint."""
    with self.assertNumQueries(8):
        response = self.client.get(self.vehicle_1.get_absolute_url() + '?date=poop')
    self.assertContains(response, 'Optare Tempo')
    self.assertContains(response, 'Trent Barton')
    self.assertContains(response, '#FF0000')
    with self.assertNumQueries(7):
        response = self.client.get(self.vehicle_2.get_absolute_url())
    self.assertEqual(200, response.status_code)
    # The journey JSON endpoint answers without any database queries here.
    with self.assertNumQueries(0):
        response = self.client.get('/journeys/1.json')
    self.assertEqual([], response.json())
def test_livery(self):
    """Livery preview HTML and direction-dependent CSS gradients, plus
    Vehicle.get_livery/get_text_colour."""
    livery = Livery(name='Go-Coach')
    self.assertEqual('Go-Coach', str(livery))
    # No colours set yet, so there is nothing to preview.
    self.assertIsNone(livery.preview())
    livery.colours = '#7D287D #FDEE00 #FDEE00'
    livery.horizontal = True
    self.assertEqual('<div style="height:1.5em;width:2.25em;background:linear-gradient' +
                     '(to top,#7D287D 34%,#FDEE00 34%)" title="Go-Coach"></div>', livery.preview())
    livery.horizontal = False
    livery.angle = 45
    self.assertEqual('linear-gradient(45deg,#7D287D 34%,#FDEE00 34%)', livery.get_css())
    # The gradient angle flips (45 <-> 315) for some directions of travel.
    self.assertEqual('linear-gradient(315deg,#7D287D 34%,#FDEE00 34%)', livery.get_css(10))
    self.assertEqual('linear-gradient(45deg,#7D287D 34%,#FDEE00 34%)', livery.get_css(300))
    livery.angle = None
    self.vehicle_1.livery = livery
    self.assertEqual('linear-gradient(to left,#7D287D 34%,#FDEE00 34%)',
                     self.vehicle_1.get_livery(179))
    self.assertIsNone(self.vehicle_1.get_text_colour())
    self.vehicle_1.livery.colours = '#c0c0c0'
    self.assertEqual('#c0c0c0', self.vehicle_1.get_livery(200))
    livery.css = 'linear-gradient(45deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)'
    self.assertEqual(livery.get_css(), 'linear-gradient(45deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)')
    self.assertEqual(livery.get_css(0), 'linear-gradient(315deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)')
    self.assertEqual(livery.get_css(10), 'linear-gradient(315deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)')
    self.assertEqual(livery.get_css(180), 'linear-gradient(45deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)')
    self.assertEqual(livery.get_css(181), 'linear-gradient(45deg,#ED1B23 35%,#fff 35%,#fff 45%,#ED1B23 45%)')
def test_vehicle_edit_1(self):
    """Vehicle edit form for vehicle_1: an unchanged submission creates
    no VehicleEdit, a fleet-number change queues one, a bad URL is
    rejected, and an invalid colour choice produces a form error."""
    self.client.force_login(self.user)
    url = self.vehicle_1.get_absolute_url() + '/edit'
    with self.assertNumQueries(13):
        response = self.client.get(url)
    self.assertNotContains(response, 'already')
    self.assertContains(response, '<option value="Long Sutton">Long Sutton</option>', html=True)
    initial = {
        'fleet_number': '1',
        'reg': 'FD54JYA',
        'vehicle_type': self.vehicle_1.vehicle_type_id,
        'features': self.wifi.id,
        'operator': self.lynx.id,
        'colours': '#FF0000',
        'other_colour': '#ffffff',
        'notes': 'Trent Barton',
        'depot': 'Holt'
    }
    # edit nothing
    with self.assertNumQueries(16):
        response = self.client.post(url, initial)
    self.assertFalse(response.context['form'].has_changed())
    self.assertNotContains(response, 'already')
    # edit fleet number
    initial['fleet_number'] = '2'
    with self.assertNumQueries(13):
        response = self.client.post(url, initial)
    self.assertIsNone(response.context['form'])
    self.assertContains(response, 'I’ll update those details')
    edit = VehicleEdit.objects.filter(approved=None).get()
    self.assertEqual(edit.colours, '')
    self.assertEqual(edit.get_changes(), {
        'fleet_number': '2'
    })
    # # edit reg, colour
    # with self.assertNumQueries(14):
    #     response = self.client.post(url, {
    #         'fleet_number': '1',
    #         'reg': 'K292JVF',
    #         'vehicle_type': self.vehicle_1.vehicle_type_id,
    #         'features': self.wifi.id,
    #         'operator': self.lynx.id,
    #         'colours': 'Other',
    #         'other_colour': '#ffffff',
    #         'notes': 'Trent Barton',
    #     })
    # self.assertIsNone(response.context['form'])
    # self.assertContains(response, 'I’ll update the other details')
    # self.assertEqual(2, VehicleEdit.objects.filter(approved=None).count())
    # response = self.client.get('/admin/vehicles/vehicleedit/')
    # self.assertContains(response, 'Lynx (2)')
    # self.assertContains(response, '127.0.0.1 (2)')
    # self.assertContains(response, 'Wi-Fi')
    # self.assertNotContains(response, '<del>Wi-Fi</del>')
    # edit type, livery and name with bad URL
    initial['vehicle_type'] = self.vehicle_2.vehicle_type_id
    initial['colours'] = self.vehicle_2.livery_id
    initial['name'] = 'Colin'
    initial['url'] = 'http://localhost'
    with self.assertNumQueries(16):
        response = self.client.post(url, initial)
    self.assertTrue(response.context['form'].has_changed())
    self.assertContains(response, 'That URL does')
    self.assertContains(response, '/edit-vehicle.')
    # # edit type, livery, name and feature
    # with self.assertNumQueries(16):
    #     response = self.client.post(url, {
    #         'fleet_number': '1',
    #         'reg': 'K292JVF',
    #         'vehicle_type': self.vehicle_2.vehicle_type_id,
    #         'features': self.usb.id,
    #         'operator': self.lynx.id,
    #         'colours': self.vehicle_2.livery_id,
    #         'other_colour': '#ffffff',
    #         'notes': 'Trent Barton',
    #         'name': 'Colin',
    #         'url': 'https://bustimes.org'
    #     })
    # self.assertIsNone(response.context['form'])
    # self.assertContains(response, 'I’ll update those details')
    # self.assertNotContains(response, '/edit-vehicle.')
    # edit = VehicleEdit.objects.last()
    # self.assertEqual(edit.url, 'https://bustimes.org')
    # self.assertEqual(str(edit.get_changes()), "{'vehicle_type': 'Optare Spectra', 'name': 'Colin', 'features': \
    # [<VehicleEditFeature: <del>Wi-Fi</del>>, <VehicleEditFeature: <ins>USB</ins>>]}")
    # response = self.client.get('/admin/vehicles/vehicleedit/')
    # self.assertContains(response, '<del>Wi-Fi</del>')
    # should not create an edit
    with self.assertNumQueries(16):
        initial['colours'] = '#FFFF00'
        response = self.client.post(url, initial)
    self.assertTrue(response.context['form'].has_changed())
    self.assertContains(response, 'Select a valid choice. #FFFF00 is not one of the available choices')
    self.assertContains(response, 'already')
    # self.assertEqual(3, VehicleEdit.objects.filter(approved=None).count())
    # with self.assertNumQueries(12):
    #     admin.apply_edits(VehicleEdit.objects.select_related('vehicle'))
    # self.assertEqual(0, VehicleEdit.objects.filter(approved=None).count())
    # vehicle = Vehicle.objects.get(notes='Trent Barton')
    # self.assertEqual(vehicle.reg, 'K292JVF')
    # self.assertEqual(vehicle.name, 'Colin')
    # self.assertEqual(self.usb, vehicle.features.get())
    # self.assertEqual(str(vehicle.vehicle_type), 'Optare Spectra')
    # self.assertEqual(vehicle.fleet_number, 2)
    # with self.assertNumQueries(10):
    #     response = self.client.get('/admin/vehicles/vehicleedit/?username=1')
    # self.assertNotContains(response, 'Lynx')
    # self.assertEqual(3, response.context_data['cl'].result_count)
    # response = self.client.get('/admin/vehicles/vehicleedit/?change=colours')
    # self.assertEqual(2, response.context_data['cl'].result_count)
    # response = self.client.get('/admin/vehicles/vehicleedit/?change=changes__Depot')
    # self.assertEqual(0, response.context_data['cl'].result_count)
    # response = self.client.get('/admin/vehicles/vehicleedit/?change=reg')
    # self.assertEqual(0, response.context_data['cl'].result_count)
def test_vehicle_edit_2(self):
    """Vehicle edit form for vehicle_2: an unchanged submission creates
    nothing; changing the operator and depot is applied immediately and
    recorded as a revision visible in the history views."""
    self.client.force_login(self.user)
    url = self.vehicle_2.get_absolute_url() + '/edit'
    initial = {
        'fleet_number': '50',
        'reg': 'UWW2X',
        'vehicle_type': self.vehicle_2.vehicle_type_id,
        'operator': self.lynx.id,
        'colours': self.vehicle_2.livery_id,
        'other_colour': '#ffffff',
        'notes': '',
        'depot': 'Long Sutton'
    }
    with self.assertNumQueries(15):
        response = self.client.post(url, initial)
    self.assertTrue(response.context['form'].fields['fleet_number'].disabled)
    self.assertFalse(response.context['form'].has_changed())
    self.assertNotContains(response, 'already')
    self.assertEqual(0, VehicleEdit.objects.count())
    self.assertNotContains(response, '/operators/bova-and-over')
    initial['notes'] = 'Ex Ipswich Buses'
    initial['depot'] = 'Holt'
    initial['name'] = 'Luther Blisset'
    initial['branding'] = 'Coastliner'
    with self.assertNumQueries(14):
        initial['operator'] = self.bova.id
        initial['reg'] = ''
        response = self.client.post(url, initial)
    self.assertIsNone(response.context['form'])
    # check vehicle operator has been changed
    self.assertContains(response, '/operators/bova-and-over')
    self.assertContains(response, 'Changed operator from Lynx to Bova and Over')
    self.assertContains(response, 'Changed depot from Long Sutton to Holt')
    self.assertContains(response, '<p>I’ll update the other details shortly</p>')
    # The operator/depot change shows up in the site-wide history...
    response = self.client.get('/vehicles/history')
    self.assertContains(response, '<td>operator</td>')
    self.assertContains(response, '<td>Lynx</td>')
    self.assertContains(response, '<td>Bova and Over</td>')
    revision = response.context['revisions'][0]
    self.assertEqual(revision.from_operator, self.lynx)
    self.assertEqual(revision.to_operator, self.bova)
    self.assertEqual(str(revision), 'operator: Lynx → Bova and Over, depot: Long Sutton → Holt')
    # ...and in the vehicle's own history page.
    response = self.client.get(f'{self.vehicle_2.get_absolute_url()}/history')
    self.assertContains(response, '<td>operator</td>')
    self.assertContains(response, '<td>Lynx</td>')
    self.assertContains(response, '<td>Bova and Over</td>')
    with self.assertNumQueries(13):
        response = self.client.get(url)
    self.assertContains(response, 'already')
    # edit = VehicleEdit.objects.get()
    # self.assertEqual(edit.get_changes(), {'branding': 'Coastliner', 'name': 'Luther Blisset',
    #                                       'notes': 'Ex Ipswich Buses'})
    # self.assertTrue(str(edit).isdigit())
    # self.assertEqual(self.vehicle_2.get_absolute_url(), edit.get_absolute_url())
    # self.assertTrue(admin.VehicleEditAdmin.flickr(None, edit))
    # self.assertEqual(admin.fleet_number(edit), '50')
    # # self.assertEqual(admin.reg(edit), '<del>UWW2X</del>')
    # self.assertEqual(admin.notes(edit), '<ins>Ex Ipswich Buses</ins>')
    # self.assertEqual(str(admin.vehicle_type(edit)), 'Optare Spectra')
    # edit.vehicle_type = 'Ford Transit'
    # self.assertEqual(str(admin.vehicle_type(edit)), '<del>Optare Spectra</del><br><ins>Ford Transit</ins>')
    # edit.vehicle.vehicle_type = None
    # self.assertEqual(admin.vehicle_type(edit), '<ins>Ford Transit</ins>')
    def test_vehicles_edit(self):
        """Bulk-editing vehicles on the operator 'vehicles/edit' page."""
        self.client.force_login(self.user)
        # posting with no vehicles selected just re-renders the form
        with self.assertNumQueries(12):
            response = self.client.post('/operators/lynx/vehicles/edit')
        self.assertContains(response, 'Select vehicles to update')
        self.assertFalse(VehicleEdit.objects.all())
        # editing notes queues a pending VehicleEdit rather than applying it
        with self.assertNumQueries(17):
            response = self.client.post('/operators/lynx/vehicles/edit', {
                'vehicle': self.vehicle_1.id,
                'operator': self.lynx.id,
                'notes': 'foo'
            })
        self.assertContains(response, 'I’ll update those details (1 vehicle) shortly')
        edit = VehicleEdit.objects.get()
        self.assertEqual(edit.vehicle_type, '')
        self.assertEqual(edit.notes, 'foo')
        self.assertContains(response, 'FD54\xa0JYA')
        # just updating operator should not create a VehicleEdit, but update the vehicle immediately
        with self.assertNumQueries(19):
            response = self.client.post('/operators/lynx/vehicles/edit', {
                'vehicle': self.vehicle_1.id,
                'operator': self.bova.id,
            })
        # the vehicle has moved operator, so it no longer appears in the Lynx list
        self.assertNotContains(response, 'FD54\xa0JYA')
        self.vehicle_1.refresh_from_db()
        self.assertEqual(self.bova, self.vehicle_1.operator)
        self.assertContains(response, '1 vehicle updated')
        # still only the one VehicleEdit from the earlier notes change
        self.assertEqual(1, VehicleEdit.objects.count())
    def test_vehicles_json(self):
        """The /vehicles.json vehicle-locations endpoint."""
        with freeze_time(self.datetime):
            # a bounding box containing no vehicles yields an empty collection
            with self.assertNumQueries(1):
                response = self.client.get('/vehicles.json?ymax=52&xmax=2&ymin=51&xmin=1')
            self.assertEqual(200, response.status_code)
            self.assertEqual({'type': 'FeatureCollection', 'features': []}, response.json())
            self.assertIsNone(response.get('last-modified'))
            with self.assertNumQueries(1):
                response = self.client.get('/vehicles.json')
            features = response.json()['features']
            self.assertEqual(features[0]['properties']['vehicle']['name'], '1 - FD54\xa0JYA')
            self.assertEqual(features[0]['properties']['service'],
                             {'line_name': '', 'url': '/services/spixworth-hunworth-happisburgh'})
            # self.assertEqual(response.get('last-modified'), 'Tue, 25 Dec 2018 19:47:00 GMT')
            # when the journey has no service foreign key, only a line_name
            # is reported (and no service URL)
            VehicleJourney.objects.update(service=None)
            with self.assertNumQueries(1):
                response = self.client.get('/vehicles.json')
            features = response.json()['features']
            self.assertEqual(features[0]['properties']['vehicle']['name'], '1 - FD54\xa0JYA')
            self.assertEqual(features[0]['properties']['service'], {'line_name': '2'})
    def test_location_json(self):
        """VehicleLocation.get_json, with and without extended details."""
        location = VehicleLocation.objects.get()
        location.journey.vehicle = self.vehicle_2
        properties = location.get_json()['properties']
        vehicle = properties['vehicle']
        self.assertEqual(vehicle['name'], '50 - UWW\xa02X')
        self.assertEqual(vehicle['text_colour'], '#fff')
        self.assertFalse(vehicle['coach'])
        self.assertTrue(vehicle['decker'])
        self.assertEqual(vehicle['livery'], 'linear-gradient(to right,#FF0000 50%,#0000FF 50%)')
        self.assertNotIn('type', vehicle)
        # the compact representation omits the operator entirely
        self.assertNotIn('operator', properties)
        # get_json(True) requests the extended representation, which carries
        # the operator name at the feature level (not on the vehicle)
        properties = location.get_json(True)['properties']
        vehicle = properties['vehicle']
        self.assertTrue(vehicle['decker'])
        self.assertFalse(vehicle['coach'])
        self.assertNotIn('operator', vehicle)
        self.assertEqual(properties['operator'], 'Lynx')
def test_validation(self):
vehicle = Vehicle(colours='ploop')
with self.assertRaises(ValidationError):
vehicle.clean()
vehicle.colours = ''
vehicle.clean()
def test_big_map(self):
with self.assertNumQueries(0):
self.client.get('/map')
def test_vehicles(self):
with self.assertNumQueries(1):
self.client.get('/vehicles')
def test_journey_detail(self):
with self.assertNumQueries(2):
response = self.client.get(f'/journeys/{self.journey.id}')
self.assertContains(response, '<th colspan="2"></th><th>Timetable</th><th>Live</th></tr>')
def test_location_detail(self):
with self.assertNumQueries(1):
response = self.client.get(f'/vehicles/locations/{self.location.id}')
self.assertContains(response, '<a href="/services/spixworth-hunworth-happisburgh"> </a>', html=True)
    def test_service_vehicle_history(self):
        """The per-service vehicle history page, including the date filter."""
        with self.assertNumQueries(4):
            # an unparseable ?date= falls back to the default date
            response = self.client.get('/services/spixworth-hunworth-happisburgh/vehicles?date=poop')
        self.assertContains(response, 'Vehicles')
        self.assertContains(response, '/vehicles/')
        self.assertContains(response, '<option selected value="2018-12-25">Tuesday 25 December 2018</option>')
        self.assertContains(response, '1 - FD54\xa0JYA')
        with self.assertNumQueries(4):
            # a valid date with no journeys shows no vehicles
            response = self.client.get('/services/spixworth-hunworth-happisburgh/vehicles?date=2004-04-04')
        self.assertNotContains(response, '1 - FD54\xa0JYA')
| {
"repo_name": "jclgoodwin/bustimes.org.uk",
"path": "vehicles/tests.py",
"copies": "1",
"size": "22672",
"license": "mpl-2.0",
"hash": 1173538754752949500,
"line_mean": 45.2408163265,
"line_max": 118,
"alpha_frac": 0.6133815871,
"autogenerated": false,
"ratio": 3.6456958970233306,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9752151256400909,
"avg_score": 0.0013852455444842641,
"num_lines": 490
} |
from freezegun import freeze_time
from hamcrest import assert_that, has_item, has_entries, \
has_length, contains, has_entry, is_
from mock import Mock, patch
from nose.tools import assert_raises
from backdrop.core import data_set
from backdrop.core.query import Query
from backdrop.core.timeseries import WEEK, MONTH
from tests.support.test_helpers import d, d_tz, match
def mock_database(mock_repository):
    """Return a Mock database whose get_repository() yields the given repository."""
    database = Mock()
    database.get_repository.return_value = mock_repository
    return database
def mock_repository():
    """Return a Mock repository whose find() and group() yield empty results."""
    repository = Mock()
    repository.find.return_value = []
    repository.group.return_value = []
    return repository
class BaseDataSetTest(object):
    """Shared setup: builds a DataSet backed by a mock storage engine."""

    def setup_config(self, additional_config=None):
        """(Re)build self.data_set with the base config plus any overrides.

        additional_config previously defaulted to a shared mutable dict
        (the classic mutable-default-argument pitfall); default to None
        and create a fresh dict per call instead.
        """
        if additional_config is None:
            additional_config = {}
        self.mock_storage = Mock()
        base_config = {
            'name': 'test_data_set',
            'data_group': 'group',
            'data_type': 'type',
            'capped_size': 0,
        }
        # dict(a.items() + b.items()) only works on Python 2 (list concat);
        # copy-and-update is equivalent (overrides win) and works on 2 and 3
        config = dict(base_config)
        config.update(additional_config)
        self.data_set_config = config
        self.data_set = data_set.DataSet(
            self.mock_storage, self.data_set_config)

    def setUp(self):
        self.setup_config()
class TestNewDataSet_attributes(BaseDataSetTest):
    def test_seconds_out_of_date_returns_none_or_int(self):
        # no last-updated timestamp recorded -> staleness is unknowable
        self.mock_storage.get_last_updated.return_value = None
        assert_that(self.data_set.get_seconds_out_of_date(), is_(None))
        # with a timestamp, an integer number of seconds is returned
        self.mock_storage.get_last_updated.return_value = d_tz(2014, 7, 1)
        assert_that(self.data_set.get_seconds_out_of_date(), is_(int))
    def test_seconds_out_of_date_shows_correct_number_of_seconds_out_of_date(self):
        with freeze_time('2014-01-28'):
            # We expect it to be 0 seconds out of date
            self.setup_config({'max_age_expected': int(0)})
            # But it's a day out of date, so it should be 1day's worth of seconds out of date
            self.mock_storage.get_last_updated.return_value = d_tz(2014, 1, 27)
            assert_that(self.data_set.get_seconds_out_of_date(), is_(86400))
        with freeze_time('2014-01-28'):
            # We expect it to be a day out of date
            self.setup_config({'max_age_expected': int(86400)})
            self.mock_storage.get_last_updated.return_value = d_tz(2014, 1, 25)
            # It's three days out, so we should get 2 days past sell by date
            assert_that(self.data_set.get_seconds_out_of_date(), is_(172800))
class TestDataSet_store(BaseDataSetTest):
    """DataSet.store: persistence, auto-ids, timestamp parsing and validation."""

    schema = {
        "$schema": "http://json-schema.org/schema#",
        "title": "Timestamps",
        "type": "object",
        "properties": {
            "_timestamp": {
                "description": "An ISO8601 formatted date time",
                "type": "string",
                "format": "date-time"
            }
        },
        "required": ["_timestamp"]
    }

    def test_storing_a_simple_record(self):
        self.data_set.store([{'foo': 'bar'}])
        self.mock_storage.save_record.assert_called_with(
            'test_data_set', {'foo': 'bar'})

    def test_id_gets_automatically_generated_if_auto_ids_are_set(self):
        self.setup_config({'auto_ids': ['foo']})
        self.data_set.store([{'foo': 'bar'}])
        # 'YmFy' is the base64 encoding of 'bar', the configured auto-id field
        self.mock_storage.save_record.assert_called_with(
            'test_data_set', match(has_entry('_id', 'YmFy')))

    def test_timestamp_gets_parsed(self):
        """Test that timestamps get parsed

        For unit tests on timestamp parsing, including failure modes,
        see the backdrop.core.records module
        """
        self.data_set.store([{'_timestamp': '2012-12-12T00:00:00+00:00'}])
        self.mock_storage.save_record.assert_called_with(
            'test_data_set',
            match(has_entry('_timestamp', d_tz(2012, 12, 12))))

    def test_record_gets_validated(self):
        errors = self.data_set.store([{'_foo': 'bar'}])
        # was assert_that(len(errors), 1): a bare int is not a matcher, so
        # hamcrest only asserted truthiness; is_(1) pins the exact count
        assert_that(len(errors), is_(1))

    def test_each_record_gets_validated_further_when_schema_given(self):
        self.setup_config({'schema': self.schema})
        errors = self.data_set.store([{"_timestamp": "2014-06-12T00:00:00+0000"}, {'foo': 'bar'}])
        # exactly one record (the second) is missing its _timestamp;
        # the trailing 1 was not wrapped in is_() so was never checked
        assert_that(
            len(filter(
                lambda error: "'_timestamp' is a required property" in error,
                errors)),
            is_(1)
        )

    def test_period_keys_are_added(self):
        self.data_set.store([{'_timestamp': '2012-12-12T00:00:00+00:00'}])
        self.mock_storage.save_record.assert_called_with(
            'test_data_set',
            match(has_entry('_day_start_at', d_tz(2012, 12, 12))))

    @patch('backdrop.core.storage.mongo.MongoStorageEngine.save_record')
    @patch('backdrop.core.records.add_period_keys')
    def test_store_returns_array_of_errors_if_errors(
            self,
            add_period_keys_patch,
            save_record_patch):
        self.setup_config({
            'schema': self.schema,
            'auto_ids': ["_timestamp", "that"]})
        errors = self.data_set.store([
            {"_timestamp": "2014-06-1xxx0:00:00+0000"},
            {'thing': {}},
            {'_foo': 'bar'}])
        # two of the three records lack a valid _timestamp per the schema
        assert_that(
            len(filter(
                lambda error: "'_timestamp' is a required property" in error,
                errors)),
            is_(2)
        )
        assert_that(
            "'2014-06-1xxx0:00:00+0000' is not a 'date-time'" in errors,
            is_(True)
        )
        assert_that(
            "thing has an invalid value" in errors,
            is_(True)
        )
        assert_that(
            "_foo is not a recognised internal field" in errors,
            is_(True)
        )
        assert_that(
            "_timestamp is not a valid datetime object" in errors,
            is_(True)
        )
        assert_that(
            'The following required id fields are missing: that' in errors,
            is_(True)
        )
        assert_that(
            'the _timestamp must be a date in the format yyyy-MM-ddT00:00:00Z'
            in errors,
            is_(True)
        )
        assert_that(
            len(errors),
            is_(8)
        )
        # with errors present, nothing should be transformed or persisted
        assert_that(add_period_keys_patch.called, is_(False))
        assert_that(save_record_patch.called, is_(False))

    @patch('backdrop.core.storage.mongo.MongoStorageEngine.save_record')
    @patch('backdrop.core.records.add_period_keys')
    def test_store_does_not_get_auto_id_type_error_due_to_datetime(
            self,
            add_period_keys_patch,
            save_record_patch):
        self.setup_config({
            'schema': self.schema,
            'auto_ids': ["_timestamp", "that"]})
        errors = self.data_set.store([
            {"_timestamp": "2012-12-12T00:00:00+00:00", 'that': 'dog'},
            {'thing': {}},
            {'_foo': 'bar'}])
        assert_that(
            len(filter(
                lambda error: "'_timestamp' is a required property" in error,
                errors)),
            is_(2)
        )
        assert_that(
            "thing has an invalid value" in errors,
            is_(True)
        )
        assert_that(
            "_foo is not a recognised internal field" in errors,
            is_(True)
        )
        assert_that(
            'The following required id fields are missing: _timestamp, that'
            in errors,
            is_(True)
        )
        assert_that(
            len(errors),
            is_(5)
        )
        # with errors present, nothing should be transformed or persisted
        assert_that(add_period_keys_patch.called, is_(False))
        assert_that(save_record_patch.called, is_(False))
class TestDataSet_patch(BaseDataSetTest):
    # NOTE(review): this schema attribute is not referenced by the patch
    # tests — it looks copy-pasted from TestDataSet_store; confirm before
    # removing.
    schema = {
        "$schema": "http://json-schema.org/schema#",
        "title": "Timestamps",
        "type": "object",
        "properties": {
            "_timestamp": {
                "description": "An ISO8601 formatted date time",
                "type": "string",
                "format": "date-time"
            }
        },
        "required": ["_timestamp"]
    }
    def test_patching_a_simple_record(self):
        # a patch is delegated straight to the storage engine
        self.data_set.patch('uuid', {'foo': 'bar'})
        self.mock_storage.update_record.assert_called_with(
            'test_data_set', 'uuid', {'foo': 'bar'})
    def test_record_not_found(self):
        # patching a missing record returns an error message, not an exception
        self.mock_storage.find_record.return_value = None
        result = self.data_set.patch('uuid', {'foo': 'bar'})
        assert_that(result, is_('No record found with id uuid'))
class TestDataSet_delete(BaseDataSetTest):
    # NOTE(review): this schema attribute is not referenced by the delete
    # tests — it looks copy-pasted from TestDataSet_store; confirm before
    # removing.
    schema = {
        "$schema": "http://json-schema.org/schema#",
        "title": "Timestamps",
        "type": "object",
        "properties": {
            "_timestamp": {
                "description": "An ISO8601 formatted date time",
                "type": "string",
                "format": "date-time"
            }
        },
        "required": ["_timestamp"]
    }
    def test_deleting_a_simple_record(self):
        # a delete is delegated straight to the storage engine
        self.data_set.delete('uuid')
        self.mock_storage.delete_record.assert_called_with(
            'test_data_set', 'uuid'
        )
    def test_record_not_found(self):
        # deleting a missing record returns an error message, not an exception
        self.mock_storage.find_record.return_value = None
        result = self.data_set.delete('uuid')
        assert_that(result, is_('No record found with id uuid'))
class TestDataSet_execute_query(BaseDataSetTest):
    """DataSet.execute_query: period validation, gap-filling, grouping,
    flattening, sorting and limiting of results from the mock storage."""

    def test_period_query_fails_when_weeks_do_not_start_on_monday(self):
        # 2013-01-08 is a Tuesday, so this result set is invalid for WEEK
        self.mock_storage.execute_query.return_value = [
            {"_week_start_at": d(2013, 1, 7, 0, 0, 0), "_count": 3},
            {"_week_start_at": d(2013, 1, 8, 0, 0, 0), "_count": 1},
        ]
        assert_raises(
            ValueError,
            self.data_set.execute_query,
            Query.create(period=WEEK)
        )

    def test_last_updated_only_queries_once(self):
        self.mock_storage.get_last_updated.return_value = 3
        initial_last_updated = self.data_set.get_last_updated()
        second_last_updated = self.data_set.get_last_updated()
        assert_that(initial_last_updated, is_(3))
        assert_that(second_last_updated, is_(3))
        # was assert_that(call_count, 1): a bare int is not a matcher, so
        # hamcrest only asserted truthiness; is_(1) pins the cached behaviour
        assert_that(self.mock_storage.get_last_updated.call_count, is_(1))

    def test_period_query_fails_when_months_do_not_start_on_the_1st(self):
        self.mock_storage.execute_query.return_value = [
            {"_month_start_at": d(2013, 1, 7, 0, 0, 0), "_count": 3},
            {"_month_start_at": d(2013, 2, 8, 0, 0, 0), "_count": 1},
        ]
        assert_raises(
            ValueError,
            self.data_set.execute_query,
            Query.create(period=MONTH)
        )

    def test_period_query_adds_missing_periods_in_correct_order(self):
        self.mock_storage.execute_query.return_value = [
            {"_week_start_at": d(2013, 1, 14, 0, 0, 0), "_count": 32},
            {"_week_start_at": d(2013, 1, 21, 0, 0, 0), "_count": 45},
            {"_week_start_at": d(2013, 2, 4, 0, 0, 0), "_count": 17},
        ]
        result = self.data_set.execute_query(
            Query.create(period=WEEK,
                         start_at=d_tz(2013, 1, 7, 0, 0, 0),
                         end_at=d_tz(2013, 2, 18, 0, 0, 0)))
        # weeks absent from storage are filled in with zero counts
        assert_that(result, contains(
            has_entries({"_start_at": d_tz(2013, 1, 7), "_count": 0}),
            has_entries({"_start_at": d_tz(2013, 1, 14), "_count": 32}),
            has_entries({"_start_at": d_tz(2013, 1, 21), "_count": 45}),
            has_entries({"_start_at": d_tz(2013, 1, 28), "_count": 0}),
            has_entries({"_start_at": d_tz(2013, 2, 4), "_count": 17}),
            has_entries({"_start_at": d_tz(2013, 2, 11), "_count": 0}),
        ))

    def test_week_and_group_query(self):
        self.mock_storage.execute_query.return_value = [
            {"some_group": "val1", "_week_start_at": d(2013, 1, 7), "_count": 1},
            {"some_group": "val1", "_week_start_at": d(2013, 1, 14), "_count": 5},
            {"some_group": "val2", "_week_start_at": d(2013, 1, 7), "_count": 2},
            {"some_group": "val2", "_week_start_at": d(2013, 1, 14), "_count": 6},
        ]
        data = self.data_set.execute_query(
            Query.create(period=WEEK, group_by=['some_group']))
        # one entry per group, each with nested per-week values
        assert_that(data, has_length(2))
        assert_that(data, has_item(has_entries({
            "values": has_item({
                "_start_at": d_tz(2013, 1, 7, 0, 0, 0),
                "_end_at": d_tz(2013, 1, 14, 0, 0, 0),
                "_count": 1
            }),
            "some_group": "val1"
        })))
        assert_that(data, has_item(has_entries({
            "values": has_item({
                "_start_at": d_tz(2013, 1, 14, 0, 0, 0),
                "_end_at": d_tz(2013, 1, 21, 0, 0, 0),
                "_count": 5
            }),
            "some_group": "val1"
        })))
        assert_that(data, has_item(has_entries({
            "values": has_item({
                "_start_at": d_tz(2013, 1, 7, 0, 0, 0),
                "_end_at": d_tz(2013, 1, 14, 0, 0, 0),
                "_count": 2
            }),
            "some_group": "val2"
        })))
        assert_that(data, has_item(has_entries({
            "values": has_item({
                "_start_at": d_tz(2013, 1, 14, 0, 0, 0),
                "_end_at": d_tz(2013, 1, 21, 0, 0, 0),
                "_count": 6
            }),
            "some_group": "val2"
        })))

    def test_flattened_week_and_group_query(self):
        self.mock_storage.execute_query.return_value = [
            {"some_group": "val1", "_week_start_at": d(2013, 1, 7), "_count": 1},
            {"some_group": "val1", "_week_start_at": d(2013, 1, 14), "_count": 5},
            {"some_group": "val2", "_week_start_at": d(2013, 1, 7), "_count": 2},
            {"some_group": "val2", "_week_start_at": d(2013, 1, 14), "_count": 6},
        ]
        data = self.data_set.execute_query(
            Query.create(period=WEEK, group_by=['some_group'], flatten=True))
        # flatten=True produces one flat entry per (group, week) pair
        assert_that(data, has_length(4))
        assert_that(data, has_item(has_entries({
            "_start_at": d_tz(2013, 1, 7, 0, 0, 0),
            "_end_at": d_tz(2013, 1, 14, 0, 0, 0),
            "_count": 1,
            "some_group": "val1"
        })))
        assert_that(data, has_item(has_entries({
            "_start_at": d_tz(2013, 1, 14, 0, 0, 0),
            "_end_at": d_tz(2013, 1, 21, 0, 0, 0),
            "_count": 5,
            "some_group": "val1"
        })))
        assert_that(data, has_item(has_entries({
            "_start_at": d_tz(2013, 1, 7, 0, 0, 0),
            "_end_at": d_tz(2013, 1, 14, 0, 0, 0),
            "_count": 2,
            "some_group": "val2"
        })))
        assert_that(data, has_item(has_entries({
            "_start_at": d_tz(2013, 1, 14, 0, 0, 0),
            "_end_at": d_tz(2013, 1, 21, 0, 0, 0),
            "_count": 6,
            "some_group": "val2"
        })))

    def test_month_and_group_query(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_month_start_at': d(2013, 1, 1), '_count': 1},
            {'some_group': 'val1', '_month_start_at': d(2013, 2, 1), '_count': 5},
            {'some_group': 'val2', '_month_start_at': d(2013, 3, 1), '_count': 2},
            {'some_group': 'val2', '_month_start_at': d(2013, 4, 1), '_count': 6},
            {'some_group': 'val2', '_month_start_at': d(2013, 7, 1), '_count': 6},
        ]
        data = self.data_set.execute_query(Query.create(period=MONTH,
                                                        group_by=['some_group']))
        assert_that(data,
                    has_item(has_entries({"values": has_length(2)})))
        assert_that(data,
                    has_item(has_entries({"values": has_length(3)})))

    def test_month_and_groups_query(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', 'another_group': 'val3', '_month_start_at': d(2013, 1, 1), '_count': 1},
            {'some_group': 'val1', 'another_group': 'val3', '_month_start_at': d(2013, 2, 1), '_count': 5},
            {'some_group': 'val2', 'another_group': 'val3', '_month_start_at': d(2013, 3, 1), '_count': 2},
            {'some_group': 'val2', 'another_group': 'val3', '_month_start_at': d(2013, 4, 1), '_count': 6},
            {'some_group': 'val2', 'another_group': 'val3', '_month_start_at': d(2013, 7, 1), '_count': 6},
        ]
        data = self.data_set.execute_query(Query.create(period=MONTH,
                                                        group_by=['some_group', 'another_group']))
        assert_that(data,
                    has_item(has_entries({"values": has_length(2)})))
        assert_that(data,
                    has_item(has_entries({"values": has_length(3)})))

    def test_month_and_group_query_with_start_and_end_at(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_month_start_at': d(2013, 1, 1), '_count': 1},
            {'some_group': 'val1', '_month_start_at': d(2013, 2, 1), '_count': 5},
            {'some_group': 'val2', '_month_start_at': d(2013, 3, 1), '_count': 2},
            {'some_group': 'val2', '_month_start_at': d(2013, 4, 1), '_count': 6},
            {'some_group': 'val2', '_month_start_at': d(2013, 7, 1), '_count': 6},
        ]
        data = self.data_set.execute_query(
            Query.create(period=MONTH,
                         group_by=['some_group'],
                         start_at=d(2013, 1, 1),
                         end_at=d(2013, 4, 2)))
        # with an explicit range, every group is padded to all four months
        assert_that(data,
                    has_item(has_entries({"values": has_length(4)})))
        assert_that(data,
                    has_item(has_entries({"values": has_length(4)})))
        first_group = data[0]["values"]
        assert_that(first_group, has_item(has_entries({
            "_start_at": d_tz(2013, 3, 1)})))
        assert_that(first_group, has_item(has_entries({
            "_start_at": d_tz(2013, 4, 1)})))
        first_group = data[1]["values"]
        assert_that(first_group, has_item(has_entries({
            "_start_at": d_tz(2013, 1, 1)})))
        assert_that(first_group, has_item(has_entries({
            "_start_at": d_tz(2013, 2, 1)})))

    def test_flattened_month_and_group_query_with_start_and_end_at(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_month_start_at': d(2013, 1, 1), '_count': 1},
            {'some_group': 'val1', '_month_start_at': d(2013, 2, 1), '_count': 5},
            {'some_group': 'val2', '_month_start_at': d(2013, 3, 1), '_count': 2},
            {'some_group': 'val2', '_month_start_at': d(2013, 4, 1), '_count': 6},
            {'some_group': 'val2', '_month_start_at': d(2013, 7, 1), '_count': 6},
        ]
        data = self.data_set.execute_query(
            Query.create(period=MONTH,
                         group_by=['some_group'],
                         start_at=d(2013, 1, 1),
                         end_at=d(2013, 4, 2),
                         flatten=True))
        # 2 groups x 4 months in range = 8 flat entries
        assert_that(data, has_length(8))
        assert_that(data, has_item(has_entries({
            '_count': 1,
            '_start_at': d_tz(2013, 1, 1),
            'some_group': 'val1',
        })))
        assert_that(data, has_item(has_entries({
            '_count': 5,
            '_start_at': d_tz(2013, 2, 1),
            'some_group': 'val1',
        })))
        assert_that(data, has_item(has_entries({
            '_count': 2,
            '_start_at': d_tz(2013, 3, 1),
            'some_group': 'val2',
        })))
        assert_that(data, has_item(has_entries({
            '_count': 6,
            '_start_at': d_tz(2013, 4, 1),
            'some_group': 'val2',
        })))

    def test_period_group_query_adds_missing_periods_in_correct_order(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 14), '_count': 23},
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 21), '_count': 41},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 31},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 28), '_count': 12},
        ]
        data = self.data_set.execute_query(
            Query.create(period=WEEK, group_by=['some_group'],
                         start_at=d_tz(2013, 1, 7, 0, 0, 0),
                         end_at=d_tz(2013, 2, 4, 0, 0, 0)))
        # gaps are zero-filled independently within each group
        assert_that(data, has_item(has_entries({
            "some_group": "val1",
            "values": contains(
                has_entries({"_start_at": d_tz(2013, 1, 7), "_count": 0}),
                has_entries({"_start_at": d_tz(2013, 1, 14), "_count": 23}),
                has_entries({"_start_at": d_tz(2013, 1, 21), "_count": 41}),
                has_entries({"_start_at": d_tz(2013, 1, 28), "_count": 0}),
            ),
        })))
        assert_that(data, has_item(has_entries({
            "some_group": "val2",
            "values": contains(
                has_entries({"_start_at": d_tz(2013, 1, 7), "_count": 0}),
                has_entries({"_start_at": d_tz(2013, 1, 14), "_count": 31}),
                has_entries({"_start_at": d_tz(2013, 1, 21), "_count": 0}),
                has_entries({"_start_at": d_tz(2013, 1, 28), "_count": 12}),
            ),
        })))

    def test_sorted_week_and_group_query(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 7), '_count': 1},
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 14), '_count': 5},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 7), '_count': 2},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 6},
        ]
        query = Query.create(period=WEEK, group_by=['some_group'],
                             sort_by=["_count", "descending"])
        data = self.data_set.execute_query(query)
        assert_that(data, contains(
            has_entries({'some_group': 'val2'}),
            has_entries({'some_group': 'val1'}),
        ))

    def test_flattened_sorted_week_and_group_query(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 7), '_count': 1},
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 14), '_count': 5},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 7), '_count': 2},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 6},
        ]
        query = Query.create(period=WEEK, group_by=['some_group'],
                             sort_by=["_count", "descending"], flatten=True)
        data = self.data_set.execute_query(query)
        # flat entries are sorted by descending count across groups
        assert_that(data, contains(
            has_entries({'_start_at': d_tz(2013, 1, 14)}),
            has_entries({'_start_at': d_tz(2013, 1, 14)}),
            has_entries({'_start_at': d_tz(2013, 1, 7)}),
            has_entries({'_start_at': d_tz(2013, 1, 7)}),
        ))

    def test_sorted_week_and_group_query_with_limit(self):
        self.mock_storage.execute_query.return_value = [
            {'some_group': 'val1', '_week_start_at': d(2013, 1, 7), '_count': 1},
            {'some_group': 'val2', '_week_start_at': d(2013, 1, 14), '_count': 5},
        ]
        query = Query.create(period=WEEK, group_by=['some_group'],
                             sort_by=["_count", "descending"], limit=1,
                             collect=[])
        data = self.data_set.execute_query(query)
        # only the top group survives the limit
        assert_that(data, contains(
            has_entries({'some_group': 'val2'})
        ))
class TestDataSet_create(BaseDataSetTest):
    def test_data_set_is_created_if_it_does_not_exist(self):
        self.mock_storage.data_set_exists.return_value = False
        self.data_set.create_if_not_exists()
        # created with the configured name and capped_size (0)
        self.mock_storage.create_data_set.assert_called_with(
            'test_data_set', 0)
    def test_data_set_is_not_created_if_it_does_exist(self):
        self.mock_storage.data_set_exists.return_value = True
        self.data_set.create_if_not_exists()
        assert_that(self.mock_storage.create_data_set.called, is_(False))
| {
"repo_name": "alphagov/backdrop",
"path": "tests/core/test_data_set.py",
"copies": "1",
"size": "24289",
"license": "mit",
"hash": 6654049791038853000,
"line_mean": 38.8180327869,
"line_max": 107,
"alpha_frac": 0.5088311581,
"autogenerated": false,
"ratio": 3.355761260016579,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9360006082249406,
"avg_score": 0.0009172671734344897,
"num_lines": 610
} |
from freezegun import freeze_time
from rest_framework import test
from waldur_core.structure.tests import factories as structure_factories
from waldur_mastermind.invoices import models as invoice_models
from waldur_mastermind.invoices.tests import factories as invoice_factories
from waldur_mastermind.invoices.tests import fixtures as invoice_fixtures
@freeze_time('2017-01-10')
class PriceCurrentTest(test.APITransactionTestCase):
    # Frozen at 2017-01-10: 9 full days of January have elapsed;
    # January has 31 days in total.
    def setUp(self):
        self.fixture = invoice_fixtures.InvoiceFixture()
        # a monthly item priced 100 per month
        invoice_factories.InvoiceItemFactory(
            invoice=self.fixture.invoice,
            project=self.fixture.project,
            unit=invoice_models.InvoiceItem.Units.PER_MONTH,
            unit_price=100,
            quantity=1,
        )
        # a daily item priced 3 per day
        invoice_factories.InvoiceItemFactory(
            invoice=self.fixture.invoice,
            project=self.fixture.project,
            unit=invoice_models.InvoiceItem.Units.PER_DAY,
            unit_price=3,
        )
    def test_current_price(self):
        self.client.force_authenticate(self.fixture.staff)
        url = structure_factories.CustomerFactory.get_url(self.fixture.project.customer)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = response.json()
        # current = full monthly price + 9 elapsed days of the daily item
        self.assertEqual(data['billing_price_estimate']['current'], 100 + 9 * 3)
        diff = (
            data['billing_price_estimate']['total']
            - data['billing_price_estimate']['current']
        )
        # total - current = the remaining 22 days of the daily item
        self.assertEqual(diff, (31 - 9) * 3)
| {
"repo_name": "opennode/waldur-mastermind",
"path": "src/waldur_mastermind/billing/tests/test_price_current.py",
"copies": "1",
"size": "1560",
"license": "mit",
"hash": 5037352213644866000,
"line_mean": 38,
"line_max": 88,
"alpha_frac": 0.6717948718,
"autogenerated": false,
"ratio": 4.0625,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.52342948718,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from rest_framework import test
from waldur_core.structure.tests import factories as structure_factories
from . import factories
class EventsStatsGetTest(test.APITransactionTestCase):
    def setUp(self) -> None:
        # one staff user created in January 2021, a second in February;
        # the February block also attaches an extra event feed to the first user
        with freeze_time('2021-01-01'):
            self.user = structure_factories.UserFactory(is_staff=True)
        with freeze_time('2021-02-01'):
            self.user2 = structure_factories.UserFactory(is_staff=True)
            event = factories.EventFactory()
            factories.FeedFactory(scope=self.user, event=event)
        self.client.force_login(self.user)
        self.url = factories.EventFactory.get_stats_list_url()
    def test_get_events_stats(self):
        # per-month counts are returned newest month first
        response = self.client.get(
            self.url, {'scope': structure_factories.UserFactory.get_url(self.user)}
        )
        self.assertEqual(200, response.status_code)
        self.assertEqual(2, len(response.data))
        self.assertEqual(
            [
                {'year': 2021, 'month': 2, 'count': 1},
                {'year': 2021, 'month': 1, 'count': 2},
            ],
            response.data,
        )
    def test_events_stats_filter_by_event_type(self):
        # filtering by event_type excludes the extra feed event
        response = self.client.get(self.url, {'event_type': 'user_creation_succeeded'})
        self.assertEqual(200, response.status_code)
        self.assertEqual(2, len(response.data))
        self.assertEqual(
            [
                {'year': 2021, 'month': 2, 'count': 1},
                {'year': 2021, 'month': 1, 'count': 1},
            ],
            response.data,
        )
    def test_unauthorized_user_can_not_get_stats(self):
        # anonymous requests are rejected with 401
        self.client.logout()
        response = self.client.get(
            self.url, {'scope': structure_factories.UserFactory.get_url(self.user)}
        )
        self.assertEqual(401, response.status_code)
| {
"repo_name": "opennode/waldur-mastermind",
"path": "src/waldur_core/logging/tests/test_events_stats.py",
"copies": "1",
"size": "1883",
"license": "mit",
"hash": 6467918269006275000,
"line_mean": 31.4655172414,
"line_max": 87,
"alpha_frac": 0.5942644716,
"autogenerated": false,
"ratio": 3.8586065573770494,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49528710289770495,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from sure import expect
from moto.swf.exceptions import SWFWorkflowExecutionClosedError
from moto.swf.models import DecisionTask, Timeout
from ..utils import make_workflow_execution, process_first_timeout
def test_decision_task_creation():
    """A freshly scheduled DecisionTask is SCHEDULED, has a token, and no started event."""
    execution = make_workflow_execution()
    task = DecisionTask(execution, 123)
    task.workflow_execution.should.equal(execution)
    task.state.should.equal("SCHEDULED")
    task.task_token.should_not.be.empty
    task.started_event_id.should.be.none
def test_decision_task_full_dict_representation():
    """to_full_dict exposes events/workflow info and, after start(), the started event id."""
    execution = make_workflow_execution()
    workflow_type = execution.workflow_type
    task = DecisionTask(execution, 123)
    snapshot = task.to_full_dict()
    snapshot["events"].should.be.a("list")
    snapshot["previousStartedEventId"].should.equal(0)
    snapshot.should_not.contain("startedEventId")
    snapshot.should.contain("taskToken")
    snapshot["workflowExecution"].should.equal(execution.to_short_dict())
    snapshot["workflowType"].should.equal(workflow_type.to_short_dict())
    task.start(1234)
    snapshot = task.to_full_dict()
    snapshot["startedEventId"].should.equal(1234)
def test_decision_task_first_timeout():
    # an unstarted task has no timeout
    wfe = make_workflow_execution()
    dt = DecisionTask(wfe, 123)
    dt.first_timeout().should.be.none
    with freeze_time("2015-01-01 12:00:00"):
        dt.start(1234)
        dt.first_timeout().should.be.none
    # the decision task's timeout is 300s == 5mins (the original comment
    # said "activity task" — a copy-paste slip), so 6 minutes later the
    # task has timed out
    with freeze_time("2015-01-01 12:06:00"):
        dt.first_timeout().should.be.a(Timeout)
    # a completed task can no longer time out
    dt.complete()
    dt.first_timeout().should.be.none
def test_decision_task_cannot_timeout_on_closed_workflow_execution():
    with freeze_time("2015-01-01 12:00:00"):
        wfe = make_workflow_execution()
        wfe.start()
    with freeze_time("2015-01-01 13:55:00"):
        dt = DecisionTask(wfe, 123)
        dt.start(1234)
    with freeze_time("2015-01-01 14:10:00"):
        # both the task and the execution are past their timeouts here
        dt.first_timeout().should.be.a(Timeout)
        wfe.first_timeout().should.be.a(Timeout)
        # once the workflow execution times out (and so closes), its
        # tasks stop reporting timeouts
        process_first_timeout(wfe)
        dt.first_timeout().should.be.none
def test_decision_task_cannot_change_state_on_closed_workflow_execution():
    # state transitions on a task are rejected once its workflow execution
    # has been closed (completed)
    wfe = make_workflow_execution()
    wfe.start()
    task = DecisionTask(wfe, 123)
    wfe.complete(123)
    task.timeout.when.called_with(Timeout(task, 0, "foo")).should.throw(SWFWorkflowExecutionClosedError)
    task.complete.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
| {
"repo_name": "EarthmanT/moto",
"path": "tests/test_swf/models/test_decision_task.py",
"copies": "3",
"size": "2386",
"license": "apache-2.0",
"hash": -7731872442409320000,
"line_mean": 31.2432432432,
"line_max": 104,
"alpha_frac": 0.6877619447,
"autogenerated": false,
"ratio": 3.237449118046133,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5425211062746134,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from sure import expect
from moto.swf.exceptions import SWFWorkflowExecutionClosedError
from moto.swf.models import (
ActivityTask,
ActivityType,
Timeout,
)
from ..utils import (
ACTIVITY_TASK_TIMEOUTS,
make_workflow_execution,
process_first_timeout,
)
def test_activity_task_creation():
    # a freshly created task is SCHEDULED, with a token but no started event
    wfe = make_workflow_execution()
    task = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        workflow_execution=wfe,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
    )
    task.workflow_execution.should.equal(wfe)
    task.state.should.equal("SCHEDULED")
    task.task_token.should_not.be.empty
    task.started_event_id.should.be.none
    # start() records the started event and moves the task to STARTED
    task.start(123)
    task.state.should.equal("STARTED")
    task.started_event_id.should.equal(123)
    task.complete()
    task.state.should.equal("COMPLETED")
    # NB: this doesn't make any sense for SWF, a task shouldn't go from a
    # "COMPLETED" state to a "FAILED" one, but this is an internal state on our
    # side and we don't care about invalid state transitions for now.
    task.fail()
    task.state.should.equal("FAILED")
def test_activity_task_full_dict_representation():
    wfe = make_workflow_execution()
    # NOTE(review): wft is unused in this test; the decision-task twin of
    # this test checks workflowType — confirm before removing
    wft = wfe.workflow_type
    at = ActivityTask(
        activity_id="my-activity-123",
        activity_type=ActivityType("foo", "v1.0"),
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=wfe,
    )
    at.start(1234)
    fd = at.to_full_dict()
    fd["activityId"].should.equal("my-activity-123")
    fd["activityType"]["version"].should.equal("v1.0")
    fd["input"].should.equal("optional")
    fd["startedEventId"].should.equal(1234)
    fd.should.contain("taskToken")
    fd["workflowExecution"].should.equal(wfe.to_short_dict())
    # starting again keeps the startedEventId in the representation
    at.start(1234)
    fd = at.to_full_dict()
    fd["startedEventId"].should.equal(1234)
def test_activity_task_reset_heartbeat_clock():
    """reset_heartbeat_clock() bumps last_heartbeat_timestamp to "now"."""
    execution = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        at = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
    at.last_heartbeat_timestamp.should.equal(1420113600.0)
    # one hour later the clock should follow the (frozen) wall clock
    with freeze_time("2015-01-01 13:00:00"):
        at.reset_heartbeat_clock()
    at.last_heartbeat_timestamp.should.equal(1420117200.0)
def test_activity_task_first_timeout():
    """No timeout within the heartbeat window; a HEARTBEAT Timeout after it."""
    execution = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        at = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
        at.first_timeout().should.be.none
    # activity task heartbeat timeout is 300s == 5mins, so 6 minutes in it fires
    with freeze_time("2015-01-01 12:06:00"):
        at.first_timeout().should.be.a(Timeout)
        process_first_timeout(at)
        at.state.should.equal("TIMED_OUT")
        at.timeout_type.should.equal("HEARTBEAT")
def test_activity_task_cannot_timeout_on_closed_workflow_execution():
    """Once the enclosing workflow has timed out, the task reports no timeout."""
    with freeze_time("2015-01-01 12:00:00"):
        execution = make_workflow_execution()
        execution.start()
    with freeze_time("2015-01-01 13:58:00"):
        at = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
    with freeze_time("2015-01-01 14:10:00"):
        # both the task and the workflow are past their limits...
        at.first_timeout().should.be.a(Timeout)
        execution.first_timeout().should.be.a(Timeout)
        # ...but after the workflow times out, the task no longer can
        process_first_timeout(execution)
        at.first_timeout().should.be.none
def test_activity_task_cannot_change_state_on_closed_workflow_execution():
    """timeout/complete/fail all raise once the workflow execution is closed."""
    execution = make_workflow_execution()
    execution.start()
    at = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=execution,
    )
    execution.complete(123)
    at.timeout.when.called_with(Timeout(at, 0, "foo")).should.throw(
        SWFWorkflowExecutionClosedError
    )
    at.complete.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
    at.fail.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
| {
"repo_name": "zonk1024/moto",
"path": "tests/test_swf/models/test_activity_task.py",
"copies": "3",
"size": "4683",
"license": "apache-2.0",
"hash": -2569850106161329000,
"line_mean": 30.4295302013,
"line_max": 104,
"alpha_frac": 0.644885757,
"autogenerated": false,
"ratio": 3.5450416351249054,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007693438012384545,
"num_lines": 149
} |
from freezegun import freeze_time
from .testcase import BaseTestCase
class TTLTestCase(BaseTestCase):
    """Exercise TTLField/TTLManager: expired instances must act as deleted.

    NOTE(review): the repeated ``objects.query()[...]`` calls in
    ``test_slice_index`` are intentional — expired predecessors appear to be
    dropped lazily, so the object found at a given index shifts between
    calls. Keep the duplication when editing.
    """
    def test_it(self):
        # TTL semantics: None -> never expires; positive -> readable while
        # effective; already expired (negative, or created far in the past)
        # -> lookup raises DoesNotExist.
        from djangostdnet import models, ttl as ttl_mod
        class AModel(models.Model):
            ttl = ttl_mod.TTLField()
            manager_class = ttl_mod.TTLManager
            class Meta:
                register = False  # keep this throwaway model out of the registry
        obj = AModel.objects.new(ttl=None)
        self.assertEqual(AModel.objects.get(id=obj.id).ttl, None,
                         "Must not raise ObjectDoesNotExist if TTL is not set")
        obj = AModel.objects.new(ttl=10)
        self.assertEqual(AModel.objects.get(id=obj.id).ttl, 10,
                         "Must not raise ObjectDoesNotExist if TTL is effective")
        obj = AModel.objects.new(ttl=-1)
        with self.assertRaises(AModel.DoesNotExist,
                               msg="Must raise ObjectDoesNotExist if TTL is expired"):
            AModel.objects.get(id=obj.id)
        # created at the epoch: even a positive TTL is long past by "now"
        with freeze_time('1970-01-01'):
            obj = AModel.objects.new(ttl=10)
        with self.assertRaises(AModel.DoesNotExist,
                               msg="Even TTL value is positive, "
                                   "Must raise ObjectDoesNotExist if its expired"):
            AModel.objects.get(id=obj.id)
    def _make_simple_ttl_instances(self):
        # Helper: five named instances; foo2/foo4 are created at the epoch so
        # their 100s TTL is long expired, foo1/foo3/foo5 remain valid.
        from stdnet import odm
        from djangostdnet import models, ttl as ttl_mod
        class AModel(models.Model):
            name = odm.CharField()
            ttl = ttl_mod.TTLField()
            manager_class = ttl_mod.TTLManager
            class Meta:
                register = False
        # instances with enough ttl
        AModel.objects.new(name='foo1', ttl=100)
        with freeze_time('1970-01-01'):
            AModel.objects.new(name='foo2', ttl=100)
        AModel.objects.new(name='foo3', ttl=100)
        with freeze_time('1970-01-01'):
            AModel.objects.new(name='foo4', ttl=100)
        AModel.objects.new(name='foo5', ttl=100)
        return AModel.objects
    def test_slice_index(self):
        """
        When a object allocated at the index is expired, retrieves valid successor in index access.
        Caveat When accessing a object allocated at the index is valid, will skip the expired predecessors.
        """
        objects = self._make_simple_ttl_instances()
        obj = objects.query()[1]
        self.assertEqual(obj.name, 'foo3')
        obj = objects.query()[1]
        self.assertEqual(obj.name, 'foo3')
        obj = objects.query()[3]
        self.assertEqual(obj.name, 'foo5')
        obj = objects.query()[2]
        self.assertEqual(obj.name, 'foo5')
        with self.assertRaises(IndexError):
            objects.query()[3]
    # These slice behavior may be changed in the future.
    def test_slice_start(self):
        # slicing from 1 skips foo1; expired foo2/foo4 are dropped entirely
        objects = self._make_simple_ttl_instances()
        # drop foo2 and foo4
        self.assertEqual([obj.name for obj in objects.query()[1:]],
                         ['foo3', 'foo5'])
    def test_slice_stop(self):
        objects = self._make_simple_ttl_instances()
        # drop foo2
        self.assertEqual([obj.name for obj in objects.query()[:1]],
                         ['foo1'])
    def test_slice_start_stop(self):
        objects = self._make_simple_ttl_instances()
        # drop foo2 and foo4
        self.assertEqual([obj.name for obj in objects.query()[1:3]],
                         ['foo3'])
| {
"repo_name": "capy-inc/django-stdnet",
"path": "tests/ttl.py",
"copies": "2",
"size": "3443",
"license": "mit",
"hash": -2914404350123311600,
"line_mean": 35.6276595745,
"line_max": 107,
"alpha_frac": 0.5788556491,
"autogenerated": false,
"ratio": 4.045828437132785,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5624684086232785,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
import responses
from botless.integrations.toggl import Toggl, TOGGL_REPORTS_DETAILS_URL
@freeze_time('2017-12-31')
def test_toggl_since_until_defaults(monkeypatch):
    """The default report window spans Jan 1st of the current year to today."""
    monkeypatch.setenv('TOGGL_API_KEY', 'SECRET_API_KEY')
    monkeypatch.setenv('TOGGL_WORKSPACE_ID', '649573')
    client = Toggl()
    assert client.since == '2017-01-01'
    assert client.until == '2017-12-31'
@responses.activate
def test_toggl(monkeypatch):
    """A mocked detailed-report response is returned as plain dict rows."""
    monkeypatch.setenv('TOGGL_API_KEY', 'SECRET_API_KEY')
    monkeypatch.setenv('TOGGL_WORKSPACE_ID', '649573')
    # Shape mirrors Toggl's reports/details API payload with a single entry.
    mock_response = {
        'total_grand': 30149000,
        'total_billable': 0,
        'total_currencies': [
            {
                'currency': 'USD',
                'amount': 0
            }
        ],
        'total_count': 1,
        'per_page': 50,
        'data': [
            {
                'id': 632804082,
                'pid': 5860998,
                'tid': 10906376,
                'uid': 2879549,
                'description': 'Working hard',
                'start': '2017-07-03T08:05:49-05:00',
                'end': '2017-07-03T16:28:18-05:00',
                'updated': '2017-07-03T16:28:20-05:00',
                'dur': 30149000,
                'user': 'Luigi Brian',
                'use_stop': True,
                'client': None,
                'project': 'Support',
                'project_color': '0',
                'project_hex_color': '#c56bff',
                'task': 'VIT (Very Important Task)',
                'billable': 0,
                'is_billable': False,
                'cur': 'USD',
                'tags': [
                    'Laser Tag'
                ]
            }
        ]
    }
    responses.add(responses.GET, TOGGL_REPORTS_DETAILS_URL, json=mock_response, status=200)
    toggl = Toggl()
    report = toggl.get_detailed_report(user_ids='2879549', since='2017-07-01', until='2017-07-04')
    # NOTE(review): 'billable' is expected back as float 0.0 although the
    # payload carried int 0 — presumably the client normalises it; confirm
    # against Toggl's report parsing.
    assert report[0] == {
        'billable': 0.0,
        'client': None,
        'cur': 'USD',
        'description': 'Working hard',
        'dur': 30149000,
        'end': '2017-07-03T16:28:18-05:00',
        'id': 632804082,
        'is_billable': False,
        'pid': 5860998,
        'project': 'Support',
        'project_color': '0',
        'project_hex_color': '#c56bff',
        'start': '2017-07-03T08:05:49-05:00',
        'tags': ['Laser Tag'],
        'task': 'VIT (Very Important Task)',
        'tid': 10906376,
        'uid': 2879549,
        'updated': '2017-07-03T16:28:20-05:00',
        'use_stop': True,
        'user': 'Luigi Brian'
    }
| {
"repo_name": "ivansabik/botless",
"path": "tests/test_toggl.py",
"copies": "1",
"size": "2616",
"license": "mit",
"hash": -5397074136260392000,
"line_mean": 29.0689655172,
"line_max": 98,
"alpha_frac": 0.4877675841,
"autogenerated": false,
"ratio": 3.241635687732342,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42294032718323415,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
import sure # noqa
from moto.swf.exceptions import SWFWorkflowExecutionClosedError
from moto.swf.models import ActivityTask, ActivityType, Timeout
from ..utils import (
ACTIVITY_TASK_TIMEOUTS,
make_workflow_execution,
process_first_timeout,
)
def test_activity_task_creation():
    """A new ActivityTask is SCHEDULED and transitions via start/complete/fail."""
    execution = make_workflow_execution()
    task = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=execution,
    )
    # freshly scheduled
    task.workflow_execution.should.equal(execution)
    task.state.should.equal("SCHEDULED")
    task.task_token.should_not.be.empty
    task.started_event_id.should.be.none
    # started
    task.start(123)
    task.state.should.equal("STARTED")
    task.started_event_id.should.equal(123)
    # completed
    task.complete()
    task.state.should.equal("COMPLETED")
    # NB: SWF would never go COMPLETED -> FAILED; this is only an internal
    # state on our side and invalid transitions are not enforced for now.
    task.fail()
    task.state.should.equal("FAILED")
def test_activity_task_full_dict_representation():
    """to_full_dict() carries id, type version, input, token and start info."""
    execution = make_workflow_execution()
    task = ActivityTask(
        activity_id="my-activity-123",
        activity_type=ActivityType("foo", "v1.0"),
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=execution,
    )
    task.start(1234)
    full = task.to_full_dict()
    full["activityId"].should.equal("my-activity-123")
    full["activityType"]["version"].should.equal("v1.0")
    full["input"].should.equal("optional")
    full["startedEventId"].should.equal(1234)
    full.should.contain("taskToken")
    full["workflowExecution"].should.equal(execution.to_short_dict())
    # a second start() keeps the same started event id
    task.start(1234)
    task.to_full_dict()["startedEventId"].should.equal(1234)
def test_activity_task_reset_heartbeat_clock():
    """reset_heartbeat_clock() moves last_heartbeat_timestamp to "now"."""
    execution = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        activity_task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
    activity_task.last_heartbeat_timestamp.should.equal(1420113600.0)
    # an hour later, resetting should track the frozen wall clock
    with freeze_time("2015-01-01 13:00:00"):
        activity_task.reset_heartbeat_clock()
    activity_task.last_heartbeat_timestamp.should.equal(1420117200.0)
def test_activity_task_first_timeout():
    """No timeout inside the heartbeat window; HEARTBEAT timeout after it."""
    execution = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        activity_task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
        activity_task.first_timeout().should.be.none
    # activity task heartbeat timeout is 300s == 5mins
    with freeze_time("2015-01-01 12:06:00"):
        activity_task.first_timeout().should.be.a(Timeout)
        process_first_timeout(activity_task)
        activity_task.state.should.equal("TIMED_OUT")
        activity_task.timeout_type.should.equal("HEARTBEAT")
def test_activity_task_first_timeout_with_heartbeat_timeout_none():
    """A heartbeat timeout of "NONE" means the task never reports a timeout."""
    execution = make_workflow_execution()
    timeouts = ACTIVITY_TASK_TIMEOUTS.copy()
    timeouts["heartbeatTimeout"] = "NONE"
    with freeze_time("2015-01-01 12:00:00"):
        activity_task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=timeouts,
            workflow_execution=execution,
        )
        activity_task.first_timeout().should.be.none
def test_activity_task_cannot_timeout_on_closed_workflow_execution():
    """After the workflow itself times out, the task no longer reports one."""
    with freeze_time("2015-01-01 12:00:00"):
        execution = make_workflow_execution()
        execution.start()
    with freeze_time("2015-01-01 13:58:00"):
        activity_task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=execution,
        )
    with freeze_time("2015-01-01 14:10:00"):
        # both are past their limits, but the workflow timeout wins
        activity_task.first_timeout().should.be.a(Timeout)
        execution.first_timeout().should.be.a(Timeout)
        process_first_timeout(execution)
        activity_task.first_timeout().should.be.none
def test_activity_task_cannot_change_state_on_closed_workflow_execution():
    """Every state transition raises once the workflow execution is closed."""
    execution = make_workflow_execution()
    execution.start()
    activity_task = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=execution,
    )
    execution.complete(123)
    activity_task.timeout.when.called_with(Timeout(activity_task, 0, "foo")).should.throw(
        SWFWorkflowExecutionClosedError
    )
    for transition in (activity_task.complete, activity_task.fail):
        transition.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
| {
"repo_name": "spulec/moto",
"path": "tests/test_swf/models/test_activity_task.py",
"copies": "2",
"size": "5227",
"license": "apache-2.0",
"hash": 293613638028003500,
"line_mean": 29.9289940828,
"line_max": 82,
"alpha_frac": 0.6407116893,
"autogenerated": false,
"ratio": 3.5899725274725274,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5230684216772528,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
import sure # noqa
from moto.swf.models import (
ActivityType,
Timeout,
WorkflowType,
WorkflowExecution,
)
from moto.swf.exceptions import SWFDefaultUndefinedFault
from ..utils import (
auto_start_decision_tasks,
get_basic_domain,
get_basic_workflow_type,
make_workflow_execution,
)
# A fully-specified set of ScheduleActivityTask decision attributes: every
# timeout is given explicitly so scheduling succeeds without relying on
# ActivityType defaults (see the "should_fail_if_wrong_attributes" test for
# what happens when pieces are missing).
VALID_ACTIVITY_TASK_ATTRIBUTES = {
    "activityId": "my-activity-001",
    "activityType": {"name": "test-activity", "version": "v1.1"},
    "taskList": {"name": "task-list-name"},
    "scheduleToStartTimeout": "600",
    "scheduleToCloseTimeout": "600",
    "startToCloseTimeout": "600",
    "heartbeatTimeout": "300",
}
def test_workflow_execution_creation():
    """The constructor wires domain, type and the explicit child policy."""
    domain = get_basic_domain()
    workflow_type = get_basic_workflow_type()
    execution = WorkflowExecution(domain, workflow_type, "ab1234", child_policy="TERMINATE")
    execution.domain.should.equal(domain)
    execution.workflow_type.should.equal(workflow_type)
    execution.child_policy.should.equal("TERMINATE")
def test_workflow_execution_creation_child_policy_logic():
    """child_policy: explicit argument wins, then the type default, else fault."""
    domain = get_basic_domain()
    # type default is used when no explicit policy is passed
    wft_with_default = WorkflowType(
        "test-workflow", "v1.0",
        task_list="queue", default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    WorkflowExecution(domain, wft_with_default, "ab1234").child_policy.should.equal("ABANDON")
    # an explicit policy overrides any default
    wft_no_default = WorkflowType(
        "test-workflow", "v1.0", task_list="queue",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    WorkflowExecution(
        domain, wft_no_default, "ab1234", child_policy="REQUEST_CANCEL"
    ).child_policy.should.equal("REQUEST_CANCEL")
    # neither explicit nor default -> fault
    WorkflowExecution.when.called_with(
        domain, WorkflowType("test-workflow", "v1.0"), "ab1234"
    ).should.throw(SWFDefaultUndefinedFault)
def test_workflow_execution_string_representation():
    """str() of an execution shows its run id."""
    execution = make_workflow_execution(child_policy="TERMINATE")
    str(execution).should.match(r"^WorkflowExecution\(run_id: .*\)")
def test_workflow_execution_generates_a_random_run_id():
    """Two executions of the same type never share a run_id."""
    domain = get_basic_domain()
    workflow_type = get_basic_workflow_type()
    first = WorkflowExecution(domain, workflow_type, "ab1234", child_policy="TERMINATE")
    second = WorkflowExecution(domain, workflow_type, "ab1235", child_policy="TERMINATE")
    first.run_id.should_not.equal(second.run_id)
def test_workflow_execution_short_dict_representation():
    """to_short_dict() exposes the workflow id and a run id."""
    domain = get_basic_domain()
    workflow_type = WorkflowType(
        "test-workflow", "v1.0",
        task_list="queue", default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    short = WorkflowExecution(domain, workflow_type, "ab1234").to_short_dict()
    short["workflowId"].should.equal("ab1234")
    short.should.contain("runId")
def test_workflow_execution_medium_dict_representation():
    """to_medium_dict() adds type, timestamps, status and (optionally) tags."""
    domain = get_basic_domain()
    workflow_type = WorkflowType(
        "test-workflow", "v1.0",
        task_list="queue", default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    execution = WorkflowExecution(domain, workflow_type, "ab1234")
    medium = execution.to_medium_dict()
    medium["execution"].should.equal(execution.to_short_dict())
    medium["workflowType"].should.equal(workflow_type.to_short_dict())
    medium["startTimestamp"].should.be.a('float')
    medium["executionStatus"].should.equal("OPEN")
    medium["cancelRequested"].should.be.falsy
    medium.should_not.contain("tagList")
    # tagList only appears once tags are set on the execution
    execution.tag_list = ["foo", "bar", "baz"]
    execution.to_medium_dict()["tagList"].should.equal(["foo", "bar", "baz"])
def test_workflow_execution_full_dict_representation():
    """to_full_dict() nests the medium dict plus counters and configuration."""
    domain = get_basic_domain()
    workflow_type = WorkflowType(
        "test-workflow", "v1.0",
        task_list="queue", default_child_policy="ABANDON",
        default_execution_start_to_close_timeout="300",
        default_task_start_to_close_timeout="300",
    )
    execution = WorkflowExecution(domain, workflow_type, "ab1234")
    full = execution.to_full_dict()
    full["executionInfo"].should.equal(execution.to_medium_dict())
    # nothing open yet on a fresh execution
    for counter in ("openTimers", "openDecisionTasks", "openActivityTasks"):
        full["openCounts"][counter].should.equal(0)
    full["executionConfiguration"].should.equal({
        "childPolicy": "ABANDON",
        "executionStartToCloseTimeout": "300",
        "taskList": {"name": "queue"},
        "taskStartToCloseTimeout": "300",
    })
def test_workflow_execution_list_dict_representation():
    """to_list_dict() carries type, status, execution ids and start time."""
    domain = get_basic_domain()
    workflow_type = WorkflowType(
        'test-workflow', 'v1.0',
        task_list='queue', default_child_policy='ABANDON',
        default_execution_start_to_close_timeout='300',
        default_task_start_to_close_timeout='300',
    )
    listed = WorkflowExecution(domain, workflow_type, 'ab1234').to_list_dict()
    listed['workflowType']['version'].should.equal('v1.0')
    listed['workflowType']['name'].should.equal('test-workflow')
    listed['executionStatus'].should.equal('OPEN')
    listed['execution']['workflowId'].should.equal('ab1234')
    listed['execution'].should.contain('runId')
    listed['cancelRequested'].should.be.false
    listed.should.contain('startTimestamp')
def test_workflow_execution_schedule_decision_task():
    """Scheduling a decision task bumps the openDecisionTasks counter."""
    execution = make_workflow_execution()
    execution.open_counts["openDecisionTasks"].should.equal(0)
    execution.schedule_decision_task()
    execution.open_counts["openDecisionTasks"].should.equal(1)
def test_workflow_execution_start_decision_task():
    """Starting a decision task records its state and a DecisionTaskStarted event."""
    execution = make_workflow_execution()
    execution.schedule_decision_task()
    decision_task = execution.decision_tasks[0]
    execution.start_decision_task(decision_task.task_token, identity="srv01")
    execution.decision_tasks[0].state.should.equal("STARTED")
    last_event = execution.events()[-1]
    last_event.event_type.should.equal("DecisionTaskStarted")
    last_event.event_attributes["identity"].should.equal("srv01")
def test_workflow_execution_history_events_ids():
    """History event ids are assigned sequentially starting at 1."""
    execution = make_workflow_execution()
    for event_type in ("WorkflowExecutionStarted",
                       "DecisionTaskScheduled",
                       "DecisionTaskStarted"):
        execution._add_event(event_type)
    ids = [evt.event_id for evt in execution.events()]
    ids.should.equal([1, 2, 3])
@freeze_time("2015-01-01 12:00:00")
def test_workflow_execution_start():
wfe = make_workflow_execution()
wfe.events().should.equal([])
wfe.start()
wfe.start_timestamp.should.equal(1420113600.0)
wfe.events().should.have.length_of(2)
wfe.events()[0].event_type.should.equal("WorkflowExecutionStarted")
wfe.events()[1].event_type.should.equal("DecisionTaskScheduled")
@freeze_time("2015-01-02 12:00:00")
def test_workflow_execution_complete():
wfe = make_workflow_execution()
wfe.complete(123, result="foo")
wfe.execution_status.should.equal("CLOSED")
wfe.close_status.should.equal("COMPLETED")
wfe.close_timestamp.should.equal(1420200000.0)
wfe.events()[-1].event_type.should.equal("WorkflowExecutionCompleted")
wfe.events()[-1].event_attributes["decisionTaskCompletedEventId"].should.equal(123)
wfe.events()[-1].event_attributes["result"].should.equal("foo")
@freeze_time("2015-01-02 12:00:00")
def test_workflow_execution_fail():
wfe = make_workflow_execution()
wfe.fail(123, details="some details", reason="my rules")
wfe.execution_status.should.equal("CLOSED")
wfe.close_status.should.equal("FAILED")
wfe.close_timestamp.should.equal(1420200000.0)
wfe.events()[-1].event_type.should.equal("WorkflowExecutionFailed")
wfe.events()[-1].event_attributes["decisionTaskCompletedEventId"].should.equal(123)
wfe.events()[-1].event_attributes["details"].should.equal("some details")
wfe.events()[-1].event_attributes["reason"].should.equal("my rules")
@freeze_time("2015-01-01 12:00:00")
def test_workflow_execution_schedule_activity_task():
wfe = make_workflow_execution()
wfe.latest_activity_task_timestamp.should.be.none
wfe.schedule_activity_task(123, VALID_ACTIVITY_TASK_ATTRIBUTES)
wfe.latest_activity_task_timestamp.should.equal(1420113600.0)
wfe.open_counts["openActivityTasks"].should.equal(1)
last_event = wfe.events()[-1]
last_event.event_type.should.equal("ActivityTaskScheduled")
last_event.event_attributes["decisionTaskCompletedEventId"].should.equal(123)
last_event.event_attributes["taskList"]["name"].should.equal("task-list-name")
wfe.activity_tasks.should.have.length_of(1)
task = wfe.activity_tasks[0]
task.activity_id.should.equal("my-activity-001")
task.activity_type.name.should.equal("test-activity")
wfe.domain.activity_task_lists["task-list-name"].should.contain(task)
def test_workflow_execution_schedule_activity_task_without_task_list_should_take_default():
    """With no explicit taskList, the ActivityType's default list is used."""
    execution = make_workflow_execution()
    execution.domain.add_type(
        ActivityType("test-activity", "v1.2", task_list="foobar")
    )
    attributes = {
        "activityId": "my-activity-001",
        "activityType": {"name": "test-activity", "version": "v1.2"},
        "scheduleToStartTimeout": "600",
        "scheduleToCloseTimeout": "600",
        "startToCloseTimeout": "600",
        "heartbeatTimeout": "300",
    }
    execution.schedule_activity_task(123, attributes)
    execution.open_counts["openActivityTasks"].should.equal(1)
    scheduled_event = execution.events()[-1]
    scheduled_event.event_type.should.equal("ActivityTaskScheduled")
    scheduled_event.event_attributes["taskList"]["name"].should.equal("foobar")
    scheduled_task = execution.activity_tasks[0]
    execution.domain.activity_task_lists["foobar"].should.contain(scheduled_task)
def test_workflow_execution_schedule_activity_task_should_fail_if_wrong_attributes():
    """Each missing/invalid attribute fails with its specific cause, in order.

    The attribute dict is completed incrementally: every intermediate
    schedule_activity_task() call must produce a ScheduleActivityTaskFailed
    event with the matching cause, and only the final, fully-specified call
    actually schedules a task. The ordering of the steps mirrors the order
    in which the implementation validates attributes — do not reorder.
    """
    wfe = make_workflow_execution()
    at = ActivityType("test-activity", "v1.1")
    at.status = "DEPRECATED"
    wfe.domain.add_type(at)
    wfe.domain.add_type(ActivityType("test-activity", "v1.2"))
    hsh = {
        "activityId": "my-activity-001",
        "activityType": {"name": "test-activity-does-not-exists", "version": "v1.1"},
    }
    # 1) unknown activity type name
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("ACTIVITY_TYPE_DOES_NOT_EXIST")
    # 2) known name but deprecated version
    hsh["activityType"]["name"] = "test-activity"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("ACTIVITY_TYPE_DEPRECATED")
    # 3) valid type, but no task list (neither explicit nor default)
    hsh["activityType"]["version"] = "v1.2"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("DEFAULT_TASK_LIST_UNDEFINED")
    # 4..7) each timeout in turn is still missing
    hsh["taskList"] = {"name": "foobar"}
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("DEFAULT_SCHEDULE_TO_START_TIMEOUT_UNDEFINED")
    hsh["scheduleToStartTimeout"] = "600"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("DEFAULT_SCHEDULE_TO_CLOSE_TIMEOUT_UNDEFINED")
    hsh["scheduleToCloseTimeout"] = "600"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("DEFAULT_START_TO_CLOSE_TIMEOUT_UNDEFINED")
    hsh["startToCloseTimeout"] = "600"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ScheduleActivityTaskFailed")
    last_event.event_attributes["cause"].should.equal("DEFAULT_HEARTBEAT_TIMEOUT_UNDEFINED")
    # nothing was actually scheduled by the failed attempts
    wfe.open_counts["openActivityTasks"].should.equal(0)
    wfe.activity_tasks.should.have.length_of(0)
    wfe.domain.activity_task_lists.should.have.length_of(0)
    # 8) fully specified: scheduling finally succeeds
    hsh["heartbeatTimeout"] = "300"
    wfe.schedule_activity_task(123, hsh)
    last_event = wfe.events()[-1]
    last_event.event_type.should.equal("ActivityTaskScheduled")
    task = wfe.activity_tasks[0]
    wfe.domain.activity_task_lists["foobar"].should.contain(task)
    wfe.open_counts["openDecisionTasks"].should.equal(0)
    wfe.open_counts["openActivityTasks"].should.equal(1)
def test_workflow_execution_schedule_activity_task_failure_triggers_new_decision():
    """Failed ScheduleActivityTask decisions re-schedule a decision task.

    Both decisions reference a non-existent activity type, so both fail; a
    single new DecisionTaskScheduled event must follow the two failures.
    The duplicated decision entry below is intentional.
    """
    wfe = make_workflow_execution()
    wfe.start()
    task_token = wfe.decision_tasks[-1].task_token
    wfe.start_decision_task(task_token)
    wfe.complete_decision_task(
        task_token,
        execution_context="free-form execution context",
        decisions=[
            {
                "decisionType": "ScheduleActivityTask",
                "scheduleActivityTaskDecisionAttributes": {
                    "activityId": "my-activity-001",
                    "activityType": {
                        "name": "test-activity-does-not-exist",
                        "version": "v1.2"
                    },
                }
            },
            {
                "decisionType": "ScheduleActivityTask",
                "scheduleActivityTaskDecisionAttributes": {
                    "activityId": "my-activity-001",
                    "activityType": {
                        "name": "test-activity-does-not-exist",
                        "version": "v1.2"
                    },
                }
            },
        ])
    wfe.latest_execution_context.should.equal("free-form execution context")
    wfe.open_counts["openActivityTasks"].should.equal(0)
    wfe.open_counts["openDecisionTasks"].should.equal(1)
    # two failures followed by exactly one re-scheduled decision task
    last_events = wfe.events()[-3:]
    last_events[0].event_type.should.equal("ScheduleActivityTaskFailed")
    last_events[1].event_type.should.equal("ScheduleActivityTaskFailed")
    last_events[2].event_type.should.equal("DecisionTaskScheduled")
def test_workflow_execution_schedule_activity_task_with_same_activity_id():
    """Reusing an activity id fails with ACTIVITY_ID_ALREADY_IN_USE."""
    execution = make_workflow_execution()
    execution.schedule_activity_task(123, VALID_ACTIVITY_TASK_ATTRIBUTES)
    execution.open_counts["openActivityTasks"].should.equal(1)
    execution.events()[-1].event_type.should.equal("ActivityTaskScheduled")
    # a second schedule with the same activityId must be rejected
    execution.schedule_activity_task(123, VALID_ACTIVITY_TASK_ATTRIBUTES)
    execution.open_counts["openActivityTasks"].should.equal(1)
    failed_event = execution.events()[-1]
    failed_event.event_type.should.equal("ScheduleActivityTaskFailed")
    failed_event.event_attributes["cause"].should.equal("ACTIVITY_ID_ALREADY_IN_USE")
def test_workflow_execution_start_activity_task():
    """Starting a scheduled activity task records identity and state."""
    execution = make_workflow_execution()
    execution.schedule_activity_task(123, VALID_ACTIVITY_TASK_ATTRIBUTES)
    token = execution.activity_tasks[-1].task_token
    execution.start_activity_task(token, identity="worker01")
    execution.activity_tasks[-1].state.should.equal("STARTED")
    started_event = execution.events()[-1]
    started_event.event_type.should.equal("ActivityTaskStarted")
    started_event.event_attributes["identity"].should.equal("worker01")
def test_complete_activity_task():
    """Completing an activity task closes it and schedules a new decision."""
    execution = make_workflow_execution()
    execution.schedule_activity_task(123, VALID_ACTIVITY_TASK_ATTRIBUTES)
    token = execution.activity_tasks[-1].task_token
    execution.open_counts["openActivityTasks"].should.equal(1)
    execution.open_counts["openDecisionTasks"].should.equal(0)
    execution.start_activity_task(token, identity="worker01")
    execution.complete_activity_task(token, result="a superb result")
    execution.activity_tasks[-1].state.should.equal("COMPLETED")
    # completion event, then the decision task it triggers
    execution.events()[-2].event_type.should.equal("ActivityTaskCompleted")
    execution.events()[-1].event_type.should.equal("DecisionTaskScheduled")
    execution.open_counts["openActivityTasks"].should.equal(0)
    execution.open_counts["openDecisionTasks"].should.equal(1)
def test_terminate():
    """terminate() closes the execution as OPERATOR_INITIATED / TERMINATED."""
    execution = make_workflow_execution()
    execution.schedule_decision_task()
    execution.terminate()
    execution.execution_status.should.equal("CLOSED")
    execution.close_status.should.equal("TERMINATED")
    execution.close_cause.should.equal("OPERATOR_INITIATED")
    execution.open_counts["openDecisionTasks"].should.equal(1)
    terminated_event = execution.events()[-1]
    terminated_event.event_type.should.equal("WorkflowExecutionTerminated")
    # take default child_policy if not provided (as here)
    terminated_event.event_attributes["childPolicy"].should.equal("ABANDON")
def test_first_timeout():
    """The execution only times out once its start-to-close window passes."""
    execution = make_workflow_execution()
    execution.first_timeout().should.be.none
    with freeze_time("2015-01-01 12:00:00"):
        execution.start()
        execution.first_timeout().should.be.none
    # 2 hours timeout reached
    with freeze_time("2015-01-01 14:01"):
        execution.first_timeout().should.be.a(Timeout)
# See moto/swf/models/workflow_execution.py "_process_timeouts()" for more details
def test_timeouts_are_processed_in_order_and_reevaluated():
    """Timeouts fire chronologically, re-evaluating state after each one."""
    # Let's make a Workflow Execution with the following properties:
    # - execution start to close timeout of 8 mins
    # - (decision) task start to close timeout of 5 mins
    #
    # Now start the workflow execution, and look at the history 15 mins later:
    # - a first decision task is fired just after workflow execution start
    # - the first decision task should have timed out after 5 mins
    # - that fires a new decision task (which we hack to start automatically)
    # - then the workflow timeouts after 8 mins (shows gradual reevaluation)
    # - but the last scheduled decision task should *not* timeout (workflow closed)
    with freeze_time("2015-01-01 12:00:00"):
        wfe = make_workflow_execution(
            execution_start_to_close_timeout=8 * 60,
            task_start_to_close_timeout=5 * 60,
        )
        # decision will automatically start
        wfe = auto_start_decision_tasks(wfe)
        wfe.start()
        event_idx = len(wfe.events())
    with freeze_time("2015-01-01 12:08:00"):
        wfe._process_timeouts()
    # only events added after start() are inspected
    event_types = [e.event_type for e in wfe.events()[event_idx:]]
    event_types.should.equal([
        "DecisionTaskTimedOut",
        "DecisionTaskScheduled",
        "DecisionTaskStarted",
        "WorkflowExecutionTimedOut",
    ])
| {
"repo_name": "tootedom/moto",
"path": "tests/test_swf/models/test_workflow_execution.py",
"copies": "7",
"size": "18405",
"license": "apache-2.0",
"hash": -4562475565737336000,
"line_mean": 36.5612244898,
"line_max": 100,
"alpha_frac": 0.6715023092,
"autogenerated": false,
"ratio": 3.4121245828698554,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0004187855712629772,
"num_lines": 490
} |
from freezegun import freeze_time
import unittest.mock as mock
from tests.plugins import PluginTestCase
from plugins.owleague import OwleaguePlugin
f = "plugins.owleague.get_owl_data"
@freeze_time("2017-12-08 10:00:00")
class OwleaguePluginTest(PluginTestCase):
def create_plugin(self):
return OwleaguePlugin(self.bot, self.channel)
    def test_returns_current_and_next_match(self):
        """!owl lists the live match plus the upcoming one with a countdown."""
        data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team3"}, {"name": "team4"}],
            },
        }
        with mock.patch(f, return_value=data):
            ret = self.cmd("owl")
            # "(in 2d 5h)" is relative to the class-level frozen clock
            self.assertEqual(
                ret,
                "Live now: team1 vs team2 -- Next match: team3 vs team4 at 2017-12-10 15:00 +0000 (in 2d 5h) -- https://overwatchleague.com",
            )
    def test_returns_next_match(self):
        """With nothing live, !owl announces only the upcoming match."""
        data = {
            "liveMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {},
        }
        with mock.patch(f, return_value=data):
            ret = self.cmd("owl")
            self.assertEqual(
                ret,
                "Next match: team1 vs team2 at 2017-12-10 15:00 +0000 (in 2d 5h) -- https://overwatchleague.com",
            )
def test_returns_nothing_when_no_matches(self):
data = {"liveMatch": {}, "nextMatch": {}}
with mock.patch(f, return_value=data):
ret = self.cmd("owl")
self.assertEqual(
ret, "No matches live or scheduled -- https://overwatchleague.com"
)
    def test_ticker_returns_when_match_goes_live(self):
        """ticker() is falsy pre-match and truthy once status flips to LIVE."""
        data = {
            "liveMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {},
        }
        with mock.patch(f, return_value=data):
            ret1 = self.plugin.ticker()
        # same match, but now reported LIVE
        data["liveMatch"]["liveStatus"] = "LIVE"
        with mock.patch(f, return_value=data):
            ret2 = self.plugin.ticker()
        self.assertFalse(ret1)
        self.assertTrue(ret2)
    def test_ticker_does_not_repeat_itself(self):
        """An unchanged live match is announced only on the first tick."""
        data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team3"}, {"name": "team4"}],
            },
        }
        with mock.patch(f, return_value=data):
            # two consecutive ticks over identical data
            ret1 = self.plugin.ticker()
            ret2 = self.plugin.ticker()
        self.assertTrue(ret1)
        self.assertFalse(ret2)
    def test_ticker_does_not_trigger_on_next_match_time_change(self):
        """A rescheduled next-match start time alone must not re-announce."""
        data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team3"}, {"name": "team4"}],
            },
        }
        with mock.patch(f, return_value=data):
            ret1 = self.plugin.ticker()
        # Only the upcoming match's start time changes; the live match is unchanged.
        data["nextMatch"]["startDate"] = "2017-12-10T16:00:00Z+00:00"
        with mock.patch(f, return_value=data):
            ret2 = self.plugin.ticker()
        self.assertTrue(ret1)
        self.assertFalse(ret2)
    def test_ticker_updates_when_match_changes(self):
        """A different live match produces a new, different announcement."""
        data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team3"}, {"name": "team4"}],
            },
        }
        with mock.patch(f, return_value=data):
            ret1 = self.plugin.ticker()
        # Swap in entirely new competitors, i.e. a different match entirely.
        data["liveMatch"]["competitors"][0]["name"] = "team3"
        data["liveMatch"]["competitors"][1]["name"] = "team4"
        data["nextMatch"]["competitors"][0]["name"] = "team5"
        data["nextMatch"]["competitors"][1]["name"] = "team6"
        with mock.patch(f, return_value=data):
            ret2 = self.plugin.ticker()
        self.assertTrue(ret1)
        self.assertTrue(ret2)
        self.assertNotEqual(ret1, ret2)
    def test_ticker_does_not_flap(self):
        """Alternating between two recently-seen results must not re-announce."""
        # Three generated payloads: data[i] has live team(i+1) vs team(i+2)
        # and next team(i+3) vs team(i+4).
        data = [
            {
                "liveMatch": {
                    "liveStatus": "LIVE",
                    "competitors": [
                        {"name": "team%d" % (i + 1)},
                        {"name": "team%d" % (i + 2)},
                    ],
                },
                "nextMatch": {
                    "liveStatus": "UPCOMING",
                    "startDate": "2017-12-10T%d:00:00Z+00:00" % (15 + i * 2),
                    "competitors": [
                        {"name": "team%d" % (i + 3)},
                        {"name": "team%d" % (i + 4)},
                    ],
                },
            }
            for i in range(0, 3)
        ]
        with mock.patch(f, return_value=data[0]):
            ret = self.plugin.ticker()
            self.assertIsNotNone(ret)
            self.assertIn("team1 vs team2", ret)
        with mock.patch(f, return_value=data[1]):
            self.assertIsNotNone(self.plugin.ticker())
            # NOTE(review): this asserts on the stale `ret` from data[0]
            # (whose next-match line already contains "team3 vs team4"), not
            # on the ticker result for data[1] — likely a leftover; confirm
            # intent before relying on this assertion.
            self.assertIn("team3 vs team4", ret)
        # Flapping back and forth between already-seen payloads stays silent.
        with mock.patch(f, return_value=data[0]):
            self.assertIsNone(self.plugin.ticker())
        with mock.patch(f, return_value=data[1]):
            self.assertIsNone(self.plugin.ticker())
        with mock.patch(f, return_value=data[0]):
            self.assertIsNone(self.plugin.ticker())
            self.assertIsNone(self.plugin.ticker())
        with mock.patch(f, return_value=data[1]):
            self.assertIsNone(self.plugin.ticker())
            self.assertIsNone(self.plugin.ticker())
        # A genuinely new payload announces again.
        with mock.patch(f, return_value=data[2]):
            ret = self.plugin.ticker()
            self.assertIsNotNone(ret)
            self.assertIn("team3 vs team4", ret)
    def test_ticker_does_not_flap_between_live_and_not_live(self):
        """A brief not-live blip must not cause a re-announcement on return to live."""
        live_data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
        }
        upcoming_data = {
            "liveMatch": {},
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"name": "team1"}, {"name": "team2"}],
            },
        }
        with mock.patch(f, return_value=live_data):
            ret = self.plugin.ticker()
            self.assertIsNotNone(ret)
            self.assertIn("team1 vs team2", ret)
        # One not-live poll followed by the same live match again: no repeat.
        with mock.patch(f, return_value=upcoming_data):
            self.assertIsNone(self.plugin.ticker())
        with mock.patch(f, return_value=live_data):
            self.assertIsNone(self.plugin.ticker())
def test_ticker_resets_after_several_not_live_results(self):
live_data = {
"liveMatch": {
"liveStatus": "LIVE",
"competitors": [{"name": "team1"}, {"name": "team2"}],
},
"nextMatch": {
"liveStatus": "UPCOMING",
"startDate": "2017-12-10T15:00:00Z+00:00",
"competitors": [{"name": "team1"}, {"name": "team2"}],
},
}
upcoming_data = {
"liveMatch": {},
"nextMatch": {
"liveStatus": "UPCOMING",
"startDate": "2017-12-10T15:00:00Z+00:00",
"competitors": [{"name": "team1"}, {"name": "team2"}],
},
}
with mock.patch(f, return_value=live_data):
ret = self.plugin.ticker()
self.assertIsNotNone(ret)
self.assertIn("team1 vs team2", ret)
with mock.patch(f, return_value=upcoming_data):
self.assertIsNone(self.plugin.ticker())
with mock.patch(f, return_value=upcoming_data):
self.assertIsNone(self.plugin.ticker())
with mock.patch(f, return_value=live_data):
self.assertIsNotNone(ret)
self.assertIn("team1 vs team2", ret)
    def test_does_not_flap_between_locales(self):
        """The same match with translated team names must not re-announce."""
        data = {
            "liveMatch": {
                "liveStatus": "LIVE",
                "competitors": [{"id": 1, "name": "team1"}, {"id": 2, "name": "team2"}],
            },
            "nextMatch": {
                "liveStatus": "UPCOMING",
                "startDate": "2017-12-10T15:00:00Z+00:00",
                "competitors": [{"id": 3, "name": "team3"}, {"id": 4, "name": "team4"}],
            },
        }
        with mock.patch(f, return_value=data):
            ret1 = self.plugin.ticker()
        # Only display names change; competitor ids stay identical.
        # NOTE(review): both next-match names are set to "lag3" — likely a
        # typo for "lag4", though it does not affect what this test checks.
        data["liveMatch"]["competitors"][0]["name"] = "lag1"
        data["liveMatch"]["competitors"][1]["name"] = "lag2"
        data["nextMatch"]["competitors"][0]["name"] = "lag3"
        data["nextMatch"]["competitors"][1]["name"] = "lag3"
        with mock.patch(f, return_value=data):
            ret2 = self.plugin.ticker()
        self.assertTrue(ret1)
        self.assertFalse(ret2)
| {
"repo_name": "anlutro/botologist",
"path": "tests/plugins/owleague_test.py",
"copies": "1",
"size": "10033",
"license": "mit",
"hash": -2673475916145381400,
"line_mean": 34.7046263345,
"line_max": 137,
"alpha_frac": 0.4837037775,
"autogenerated": false,
"ratio": 3.6025134649910235,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4586217242491023,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from app.dao.event_dates_dao import dao_get_event_date_by_id
from app.dao.events_dao import (
dao_create_event,
dao_delete_event,
dao_update_event,
dao_get_events,
dao_get_event_by_id,
dao_get_events_in_year,
dao_get_future_events,
dao_get_limited_events,
dao_get_past_year_events,
)
from app.models import Event, EventDate, RejectReason
from tests.db import create_event, create_event_date, create_speaker
class WhenUsingEventsDAO(object):
    """Tests for the events DAO: create, delete, update and query helpers."""

    def it_creates_an_event(self, db, db_session):
        event = create_event()
        assert Event.query.count() == 1
        event_from_db = Event.query.first()
        assert event == event_from_db

    def it_deletes_an_event(self, db, db_session):
        event = create_event()
        assert Event.query.count() == 1
        dao_delete_event(event.id)
        assert Event.query.count() == 0

    def it_creates_an_event_with_event_dates(self, db, db_session):
        event_date = create_event_date()
        event = create_event(event_dates=[event_date])
        assert Event.query.count() == 1
        event_from_db = Event.query.first()
        assert event == event_from_db
        assert event_from_db.event_dates[0] == event_date

    def it_deletes_an_event_with_event_dates(self, db, db_session):
        event_date = create_event_date()
        event = create_event(event_dates=[event_date])
        assert Event.query.count() == 1
        dao_delete_event(event.id)
        assert Event.query.count() == 0

    def it_updates_an_event_dao(self, db, db_session, sample_event):
        dao_update_event(sample_event.id, title='new title')
        event_from_db = Event.query.filter(Event.id == sample_event.id).first()
        # BUG FIX: comparing sample_event.title with event_from_db.title is a
        # tautology when the session's identity map returns the same object;
        # assert against the literal value actually written.
        assert event_from_db.title == 'new title'

    def it_updates_an_event_dao_with_new_event_date(self, db, db_session, sample_event):
        speaker = create_speaker(name='John Brown')
        event_date = create_event_date(event_datetime='2018-01-20T19:00:00', speakers=[speaker])
        dao_update_event(sample_event.id, event_dates=[event_date])
        event_from_db = Event.query.filter(Event.id == sample_event.id).first()
        # BUG FIX: compare against the event_date we just attached rather than
        # the (identity-mapped, hence tautological) sample_event attribute.
        assert event_from_db.event_dates[0] == event_date
        # Updating with a different event date replaces the previous one.
        event_date2 = create_event_date(event_datetime='2018-02-20T19:00:00', speakers=[speaker])
        dao_update_event(sample_event.id, event_dates=[event_date2])
        event_dates = EventDate.query.all()
        assert len(event_dates) == 1

    def it_updates_an_event_dao_with_new_speaker(self, db, db_session):
        speaker = create_speaker(name='John Brown')
        event_date = create_event_date(event_datetime='2018-01-20T19:00:00', speakers=[speaker])
        event = create_event(event_dates=[event_date])
        speaker2 = create_speaker(name='Jim Blue')
        db_event_date = dao_get_event_date_by_id(event_date.id)
        db_event_date.speakers = [speaker, speaker2]
        dao_update_event(event.id, event_dates=[db_event_date])
        event_from_db = Event.query.filter(Event.id == event.id).first()
        # NOTE(review): like above, this comparison may be tautological via the
        # identity map; the meaningful check here is that no extra EventDate
        # row was created by the speaker change.
        assert event.event_dates[0] == event_from_db.event_dates[0]
        event_dates = EventDate.query.all()
        assert len(event_dates) == 1

    def it_gets_all_events(self, db, db_session, sample_event, sample_event_type):
        events = [create_event(title='test title 2', event_type_id=sample_event_type.id), sample_event]
        events_from_db = dao_get_events()
        assert Event.query.count() == 2
        assert set(events) == set(events_from_db)

    def it_gets_event_by_id(self, db, db_session, sample_event, sample_event_type):
        create_event(title='test title 2', event_type_id=sample_event_type.id)
        event_from_db = dao_get_event_by_id(sample_event.id)
        assert Event.query.count() == 2
        assert event_from_db == sample_event

    def it_gets_event_by_id_with_reject_reason(self, db_session, sample_event_type, sample_reject_reason):
        event_from_db = dao_get_event_by_id(sample_reject_reason.event_id)
        assert Event.query.count() == 1
        assert event_from_db.reject_reasons == [sample_reject_reason]

    @freeze_time("2018-01-10T19:00:00")
    def it_gets_all_future_events(self, db, db_session, sample_event_with_dates, sample_event_type):
        # Only the event dated after the frozen "now" should be returned.
        event = create_event(
            title='future event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2018-01-20T19:00:00')]
        )
        events_from_db = dao_get_future_events()
        assert Event.query.count() == 2
        assert len(events_from_db) == 1
        assert events_from_db[0] == event

    @freeze_time("2018-01-10T19:00:00")
    def it_gets_past_year_events(self, db, db_session, sample_event_with_dates, sample_event_type):
        # Neither an event older than a year nor a future one should match.
        create_event(
            title='way past last year event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2016-01-01T19:00:00')]
        )
        create_event(
            title='future event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2018-01-20T19:00:00')]
        )
        events_from_db = dao_get_past_year_events()
        assert Event.query.count() == 3
        assert len(events_from_db) == 1
        assert events_from_db[0] == sample_event_with_dates

    def it_gets_events_in_year(self, db, db_session, sample_event_with_dates, sample_event_type):
        # Boundary events just inside/outside 2018 check the year filter edges.
        event_2 = create_event(
            title='2018 event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2018-12-31T23:59:00')]
        )
        create_event(
            title='2017 event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2017-12-31T23:59:59')]
        )
        create_event(
            title='2019 event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2019-01-01T00:00:01')]
        )
        events_from_db = dao_get_events_in_year(2018)
        assert len(events_from_db) == 2
        assert events_from_db[0] == sample_event_with_dates
        assert events_from_db[1] == event_2

    def it_gets_limited_events(self, db, db_session, sample_event_with_dates, sample_event_type):
        event_2 = create_event(
            title='2018 event',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2018-12-31T23:59:00')]
        )
        create_event(
            title='beyond limit',
            event_type_id=sample_event_type.id,
            event_dates=[create_event_date(event_datetime='2017-12-31T23:59:59')]
        )
        events_from_db = dao_get_limited_events(2)
        assert len(events_from_db) == 2
        assert events_from_db[0] == event_2
        assert events_from_db[1] == sample_event_with_dates
| {
"repo_name": "NewAcropolis/api",
"path": "tests/app/dao/test_events_dao.py",
"copies": "1",
"size": "7045",
"license": "mit",
"hash": 2695704700863603000,
"line_mean": 35.6927083333,
"line_max": 106,
"alpha_frac": 0.6249822569,
"autogenerated": false,
"ratio": 3.275220827522083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9394399885045841,
"avg_score": 0.001160639875248386,
"num_lines": 192
} |
from freezegun import freeze_time
from app.na_celery.event_tasks import send_event_email_reminder
from app.dao.events_dao import dao_update_event
from app.models import APPROVED
class WhenProcessingSendEventEmailReminderTask:
    """Tests for the send_event_email_reminder celery task."""

    @freeze_time("2017-12-20T10:00:00")
    def it_sends_the_event_email_reminder(self, mocker, db_session, sample_event_with_dates, sample_admin_user):
        # Presumably reminders are only sent for APPROVED events close to
        # their date (frozen clock) — confirm against the task implementation.
        dao_update_event(sample_event_with_dates.id, event_state=APPROVED)
        mock_send_email = mocker.patch('app.na_celery.event_tasks.send_smtp_email', return_value=200)
        send_event_email_reminder()
        # Positional args: (recipient, subject, ...).
        assert mock_send_email.call_args[0][0] == sample_admin_user.email
        assert mock_send_email.call_args[0][1] == f"Event: {sample_event_with_dates.title} email reminder"

    @freeze_time("2017-12-01T10:00:00")
    def it_does_not_send_the_event_email_reminder_too_early(
        self, mocker, db_session, sample_event_with_dates, sample_admin_user
    ):
        # Clock frozen well before the event: no email should go out.
        mock_send_email = mocker.patch('app.na_celery.event_tasks.send_smtp_email', return_value=200)
        send_event_email_reminder()
        assert not mock_send_email.called

    @freeze_time("2017-12-20T10:00:00")
    def it_reports_an_error_if_sending_reminder_fails(
        self, mocker, db_session, sample_event_with_dates, sample_admin_user
    ):
        dao_update_event(sample_event_with_dates.id, event_state=APPROVED)
        # A non-2xx SMTP response (503) should be logged as an error.
        mock_send_email = mocker.patch('app.na_celery.event_tasks.send_smtp_email', return_value=503)
        mock_logger = mocker.patch('app.na_celery.event_tasks.current_app.logger.error')
        send_event_email_reminder()
        assert mock_send_email.call_args[0][0] == sample_admin_user.email
        assert mock_send_email.call_args[0][1] == f"Event: {sample_event_with_dates.title} email reminder"
        assert mock_logger.called
        assert mock_logger.call_args[0][0] == f"Problem sending reminder email Event"\
            f": {sample_event_with_dates.title} email reminder for {sample_admin_user.id}, status code: 503"
| {
"repo_name": "NewAcropolis/api",
"path": "tests/app/na_celery/test_event_tasks.py",
"copies": "1",
"size": "2042",
"license": "mit",
"hash": 2828527517430938600,
"line_mean": 45.4090909091,
"line_max": 112,
"alpha_frac": 0.6978452498,
"autogenerated": false,
"ratio": 3.1609907120743035,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.43588359618743033,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from great_expectations.checkpoint.actions import StoreMetricsAction
from great_expectations.core import ExpectationConfiguration
from great_expectations.core.expectation_validation_result import (
ExpectationSuiteValidationResult,
ExpectationValidationResult,
)
from great_expectations.core.metric import ValidationMetricIdentifier
from great_expectations.core.run_identifier import RunIdentifier
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
ValidationResultIdentifier,
)
@freeze_time("09/26/2019 13:42:41")
def test_StoreMetricsAction(basic_in_memory_data_context_for_validation_operator):
    """StoreMetricsAction stores the requested statistics for each suite it runs on.

    The original body repeated the result-building and store-lookup
    boilerplate four times; it is factored into two local helpers.
    """
    context = basic_in_memory_data_context_for_validation_operator
    action = StoreMetricsAction(
        data_context=context,
        requested_metrics={
            "*": [
                "statistics.evaluated_expectations",
                "statistics.successful_expectations",
            ]
        },
        target_store_name="metrics_store",
    )
    run_id = RunIdentifier(run_name="bar")

    def run_suite(suite_name, evaluated, successful):
        # Build a minimal validation result for *suite_name* and run the
        # action so its statistics land in the metrics store.
        validation_result = ExpectationSuiteValidationResult(
            success=False,
            meta={"expectation_suite_name": suite_name, "run_id": run_id},
            statistics={
                "evaluated_expectations": evaluated,
                "successful_expectations": successful,
            },
        )
        action.run(
            validation_result,
            ValidationResultIdentifier.from_object(validation_result),
            data_asset=None,
        )

    def stored(suite_name, metric_name):
        # Read a single metric value back from the target store.
        return context.stores["metrics_store"].get(
            ValidationMetricIdentifier(
                run_id=run_id,
                data_asset_name=None,
                expectation_suite_identifier=ExpectationSuiteIdentifier(suite_name),
                metric_name=metric_name,
                metric_kwargs_id=None,
            )
        )

    run_suite("foo", 5, 3)
    run_suite("foo.warning", 8, 4)

    assert stored("foo", "statistics.evaluated_expectations") == 5
    assert stored("foo", "statistics.successful_expectations") == 3
    assert stored("foo.warning", "statistics.evaluated_expectations") == 8
    assert stored("foo.warning", "statistics.successful_expectations") == 4
@freeze_time("09/26/2019 13:42:41")
def test_StoreMetricsAction_column_metric(
    basic_in_memory_data_context_for_validation_operator,
):
    """A per-column requested metric is stored under its column kwargs id."""
    context = basic_in_memory_data_context_for_validation_operator
    action = StoreMetricsAction(
        data_context=context,
        requested_metrics={
            "*": [
                {
                    "column": {
                        "provider_id": [
                            "expect_column_values_to_be_unique.result.unexpected_count"
                        ]
                    }
                },
                "statistics.evaluated_expectations",
                "statistics.successful_expectations",
            ]
        },
        target_store_name="metrics_store",
    )
    run_id = RunIdentifier(run_name="bar")
    # One expectation result carrying the unexpected_count we expect to be stored.
    column_result = ExpectationValidationResult(
        meta={},
        result={
            "element_count": 10,
            "missing_count": 0,
            "missing_percent": 0.0,
            "unexpected_count": 7,
            "unexpected_percent": 0.0,
            "unexpected_percent_nonmissing": 0.0,
            "partial_unexpected_list": [],
        },
        success=True,
        expectation_config=ExpectationConfiguration(
            expectation_type="expect_column_values_to_be_unique",
            kwargs={"column": "provider_id", "result_format": "BASIC"},
        ),
        exception_info=None,
    )
    validation_result = ExpectationSuiteValidationResult(
        success=False,
        meta={"expectation_suite_name": "foo", "run_id": run_id},
        results=[column_result],
        statistics={"evaluated_expectations": 5, "successful_expectations": 3},
    )
    action.run(
        validation_result,
        ValidationResultIdentifier.from_object(validation_result),
        data_asset=None,
    )
    metric_id = ValidationMetricIdentifier(
        run_id=run_id,
        data_asset_name=None,
        expectation_suite_identifier=ExpectationSuiteIdentifier("foo"),
        metric_name="expect_column_values_to_be_unique.result.unexpected_count",
        metric_kwargs_id="column=provider_id",
    )
    assert context.stores["metrics_store"].get(metric_id) == 7
| {
"repo_name": "great-expectations/great_expectations",
"path": "tests/actions/test_store_metric_action.py",
"copies": "1",
"size": "6174",
"license": "apache-2.0",
"hash": -1452434998005617400,
"line_mean": 32.0160427807,
"line_max": 88,
"alpha_frac": 0.5704567541,
"autogenerated": false,
"ratio": 4.546391752577319,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5616848506677319,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from moto.swf.exceptions import SWFWorkflowExecutionClosedError
from moto.swf.models import (
ActivityTask,
ActivityType,
Timeout,
)
from ..utils import (
ACTIVITY_TASK_TIMEOUTS,
make_workflow_execution,
process_first_timeout,
)
def test_activity_task_creation():
    """An ActivityTask starts SCHEDULED and moves through STARTED/COMPLETED/FAILED."""
    wfe = make_workflow_execution()
    task = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        workflow_execution=wfe,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
    )
    # Freshly created: scheduled, with a token but no started event yet.
    task.workflow_execution.should.equal(wfe)
    task.state.should.equal("SCHEDULED")
    task.task_token.should_not.be.empty
    task.started_event_id.should.be.none
    task.start(123)
    task.state.should.equal("STARTED")
    task.started_event_id.should.equal(123)
    task.complete()
    task.state.should.equal("COMPLETED")
    # NB: this doesn't make any sense for SWF, a task shouldn't go from a
    # "COMPLETED" state to a "FAILED" one, but this is an internal state on our
    # side and we don't care about invalid state transitions for now.
    task.fail()
    task.state.should.equal("FAILED")
def test_activity_task_full_dict_representation():
    """to_full_dict() exposes id, type, input, started event and workflow info."""
    wfe = make_workflow_execution()
    at = ActivityTask(
        activity_id="my-activity-123",
        activity_type=ActivityType("foo", "v1.0"),
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=wfe,
    )
    at.start(1234)
    fd = at.to_full_dict()
    fd["activityId"].should.equal("my-activity-123")
    fd["activityType"]["version"].should.equal("v1.0")
    fd["input"].should.equal("optional")
    fd["startedEventId"].should.equal(1234)
    fd.should.contain("taskToken")
    fd["workflowExecution"].should.equal(wfe.to_short_dict())
    # Starting again with the same event id leaves the representation stable.
    at.start(1234)
    fd = at.to_full_dict()
    fd["startedEventId"].should.equal(1234)
def test_activity_task_reset_heartbeat_clock():
    """reset_heartbeat_clock() stamps the task with the current (frozen) time."""
    wfe = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=wfe,
        )
        # Creation time as a unix timestamp (2015-01-01 12:00:00 UTC).
        task.last_heartbeat_timestamp.should.equal(1420113600.0)
    with freeze_time("2015-01-01 13:00:00"):
        task.reset_heartbeat_clock()
        task.last_heartbeat_timestamp.should.equal(1420117200.0)
def test_activity_task_first_timeout():
    """A task past its heartbeat window reports a HEARTBEAT timeout."""
    wfe = make_workflow_execution()
    with freeze_time("2015-01-01 12:00:00"):
        task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=wfe,
        )
        # Within the window: no timeout yet.
        task.first_timeout().should.be.none
    # activity task timeout is 300s == 5mins
    with freeze_time("2015-01-01 12:06:00"):
        task.first_timeout().should.be.a(Timeout)
        process_first_timeout(task)
        task.state.should.equal("TIMED_OUT")
        task.timeout_type.should.equal("HEARTBEAT")
def test_activity_task_cannot_timeout_on_closed_workflow_execution():
    """Once the workflow execution itself times out, its tasks stop timing out."""
    with freeze_time("2015-01-01 12:00:00"):
        wfe = make_workflow_execution()
        wfe.start()
    with freeze_time("2015-01-01 13:58:00"):
        task = ActivityTask(
            activity_id="my-activity-123",
            activity_type="foo",
            input="optional",
            scheduled_event_id=117,
            timeouts=ACTIVITY_TASK_TIMEOUTS,
            workflow_execution=wfe,
        )
    with freeze_time("2015-01-01 14:10:00"):
        # Both the task and the workflow are past their limits...
        task.first_timeout().should.be.a(Timeout)
        wfe.first_timeout().should.be.a(Timeout)
        # ...but after the workflow's timeout is processed (closing it),
        # the task no longer reports a timeout of its own.
        process_first_timeout(wfe)
        task.first_timeout().should.be.none
def test_activity_task_cannot_change_state_on_closed_workflow_execution():
    """State transitions on a task of a completed workflow raise SWFWorkflowExecutionClosedError."""
    wfe = make_workflow_execution()
    wfe.start()
    task = ActivityTask(
        activity_id="my-activity-123",
        activity_type="foo",
        input="optional",
        scheduled_event_id=117,
        timeouts=ACTIVITY_TASK_TIMEOUTS,
        workflow_execution=wfe,
    )
    # Close the workflow, then verify every mutating call is rejected.
    wfe.complete(123)
    task.timeout.when.called_with(Timeout(task, 0, "foo")).should.throw(
        SWFWorkflowExecutionClosedError)
    task.complete.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
    task.fail.when.called_with().should.throw(SWFWorkflowExecutionClosedError)
| {
"repo_name": "dbfr3qs/moto",
"path": "tests/test_swf/models/test_activity_task.py",
"copies": "7",
"size": "4645",
"license": "apache-2.0",
"hash": -4757272046823138000,
"line_mean": 29.3594771242,
"line_max": 82,
"alpha_frac": 0.6419806243,
"autogenerated": false,
"ratio": 3.5539403213465954,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7695920945646595,
"avg_score": null,
"num_lines": null
} |
from freezegun import freeze_time
from openinghours.tests.tests import OpeningHoursTestCase
class FormsTestCase(OpeningHoursTestCase):
    """Tests for the opening-hours edit form and the public hours display."""

    def setUp(self):
        super(FormsTestCase, self).setUp()

    def tearDown(self):
        super(FormsTestCase, self).tearDown()

    def test_hours_are_published(self):
        response = self.client.get('/')
        self.assertContains(response, '8:30am to 12:00pm')
        self.assertContains(response, '10:00am to 1:00pm')

    def test_edit_form(self):
        # Drop the fixture data first — presumably so the form starts from a
        # clean slate (TODO confirm tearDown removes the fixture hours).
        self.tearDown()
        post_data = {
            'day1_1-opens': '11:30', 'day1_1-shuts': '17:30',
            'day2_1-opens': '11:30', 'day2_1-shuts': '17:30',
            'day3_1-opens': '11:30', 'day3_1-shuts': '17:30',
            'day4_1-opens': '11:30', 'day4_1-shuts': '17:30',
            'day5_1-opens': '11:30', 'day5_1-shuts': '17:30',
            'day6_1-opens': '11:30', 'day6_1-shuts': '13:30',
            'day7_1-opens': '00:00', 'day7_1-shuts': '00:00',
            'day1_2-opens': '00:00', 'day1_2-shuts': '00:00',
            'day2_2-opens': '00:00', 'day2_2-shuts': '00:00',
            'day3_2-opens': '00:00', 'day3_2-shuts': '00:00',
            'day4_2-opens': '00:00', 'day4_2-shuts': '00:00',
            'day5_2-opens': '00:00', 'day5_2-shuts': '00:00',
            'day6_2-opens': '00:00', 'day6_2-shuts': '00:00',
            'day7_2-opens': '00:00', 'day7_2-shuts': '00:00',
        }
        # The POST response itself is not inspected (unused local removed);
        # the GETs below verify the values were persisted and rendered.
        self.client.post('/edit/1', post_data)
        resp = self.client.get('/edit/1')
        self.assertContains(resp, '<option value="11:30" selected', count=6)
        self.assertContains(resp, '<option value="17:30" selected', count=5)
        self.assertContains(resp, '<option value="00:00">', count=7*2*2)
        resp2 = self.client.get('/')
        self.assertContains(resp2, '11:30am')
        self.assertContains(resp2, '5:30pm')
| {
"repo_name": "arteria/django-openinghours",
"path": "openinghours/tests/test_forms.py",
"copies": "1",
"size": "1879",
"license": "mit",
"hash": -2769883819271983000,
"line_mean": 41.7045454545,
"line_max": 76,
"alpha_frac": 0.5513571048,
"autogenerated": false,
"ratio": 2.739067055393586,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8790424160193586,
"avg_score": 0,
"num_lines": 44
} |
from freezegun import freeze_time
import lemonyellow.data.dialogue as dialogue
import lemonyellow.data.items as items
from lemonyellow.data.pokemon_data import pokemon
from lemonyellow.core.kernel import create_player
from lemonyellow.core.kernel import pokemon_lotto
def test_player_not_initialized():
    """Anything that is not a Player object yields no lotto result."""
    assert pokemon_lotto("totally not a Player object") is None
def test_elite_four_not_beaten_yet():
    """Before beating the rival, the lotto turns the player away."""
    player = create_player(name="Joe")
    assert pokemon_lotto(player) == dialogue.celadon_city.npc211
def test_elite_four_beaten():
    """Once the rival is defeated, the lotto greeting is included."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    assert dialogue.celadon_city.npc211A in pokemon_lotto(player)
@freeze_time("1969-12-31")
def test_not_waited_long_enough():
    """With the clock frozen in the past, the player is told to come back later."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    assert pokemon_lotto(player) == dialogue.celadon_city.npc211I
def test_win_grand_prize():
    """An exact ID match on a party pokemon wins the Master Ball."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].id_number = 12345
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211H in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.master_ball in player.inventory
def test_win_first_prize():
    """A near-match ID (10345 vs 12345) wins the Max Revive."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].id_number = 10345
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211G in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.max_revive in player.inventory
def test_win_second_prize():
    """A partial-match ID (10045 vs 12345) wins the Exp Share."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].id_number = 10045
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211F in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.exp_share in player.inventory
def test_win_third_prize():
    """A minimal-match ID (10005 vs 12345) wins the PP Up."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].id_number = 10005
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211E in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.pp_up in player.inventory
def test_win_no_prize():
    """No matching digits: consolation line and an empty inventory."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].id_number = "00000"
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211K in result
    assert player.inventory == {}
def test_winning_pokemon_in_storage_system():
    """A winning ID is found even when the pokemon sits in box storage."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    # Fill the party with non-winning IDs so the winner must come from a box.
    for slot_no in range(1, 7):
        player.add_pokemon(pokemon.bulbasaur)
        player.slot[slot_no].id_number = "00000"
    player.add_pokemon(pokemon.bulbasaur)
    player.storage_system.box[1].slot[1].id_number = 12345
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211H in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.master_ball in player.inventory
def test_win_grand_prize_no_nickname():
    """A winning pokemon with an empty nickname still claims the grand prize."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    player.add_pokemon(pokemon.bulbasaur)
    player.slot[1].nickname = ""
    player.slot[1].id_number = 12345
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211H in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.master_ball in player.inventory
def test_winning_pokemon_in_storage_system_no_nickname():
    """A boxed, nickname-less winner is still detected and rewarded."""
    player = create_player(name="Joe")
    player.storyline.defeated.elite.rival = True
    # Non-winning party members force the lookup into the storage system.
    for slot_no in range(1, 7):
        player.add_pokemon(pokemon.bulbasaur)
        player.slot[slot_no].id_number = "00000"
    player.add_pokemon(pokemon.bulbasaur)
    player.storage_system.box[1].slot[1].id_number = 12345
    player.storage_system.box[1].slot[1].nickname = ""
    result = pokemon_lotto(player, lotto_num=12345)
    assert dialogue.celadon_city.npc211H in result
    assert dialogue.celadon_city.npc211K not in result
    assert items.master_ball in player.inventory
| {
"repo_name": "itsthejoker/Pokemon-Homage",
"path": "lemonyellow/core/tests/test_pokemon_lotto.py",
"copies": "1",
"size": "4698",
"license": "mit",
"hash": 7016016297389958000,
"line_mean": 35.4186046512,
"line_max": 60,
"alpha_frac": 0.7203065134,
"autogenerated": false,
"ratio": 2.8216216216216217,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9041928135021622,
"avg_score": 0,
"num_lines": 129
} |
from freight import vcs
from freight.config import db, queue
from freight.models import LogChunk, TaskStatus
from freight.testutils import TransactionTestCase
from freight.utils.workspace import Workspace
class ExecuteTaskTestCase(TransactionTestCase):
    """End-to-end test: execute a deploy job and inspect its logs and status."""

    # TODO(dcramer): this test relies on quite a few things actually working
    def test_simple(self):
        user = self.create_user()
        repo = self.create_repo()
        app = self.create_app(repository=repo)
        self.create_taskconfig(app=app)
        task = self.create_task(app=app, user=user)
        deploy = self.create_deploy(app=app, task=task)
        db.session.commit()
        # Make sure a local checkout of the repo exists before the job runs.
        workspace = Workspace(path=repo.get_path())
        vcs_backend = vcs.get(repo.vcs, url=repo.url, workspace=workspace)
        if vcs_backend.exists():
            vcs_backend.update()
        else:
            vcs_backend.clone()
        queue.apply("freight.jobs.execute_deploy", kwargs={"deploy_id": deploy.id})
        # The job ran in another session; refresh our view of the task.
        db.session.expire_all()
        assert task.date_started is not None
        assert task.date_finished is not None
        assert task.status == TaskStatus.finished
        # The task's log chunks, in order, should contain the command echo.
        logchunks = list(
            LogChunk.query.filter(LogChunk.task_id == task.id).order_by(
                LogChunk.offset.asc()
            )
        )
        assert len(logchunks) >= 1
        all_text = "".join(c.text for c in logchunks)
        assert ">> Running ['/bin/echo', 'helloworld']" in all_text
| {
"repo_name": "getsentry/freight",
"path": "tests/tasks/test_execute_task.py",
"copies": "1",
"size": "1472",
"license": "apache-2.0",
"hash": 8862780254515075000,
"line_mean": 32.4545454545,
"line_max": 83,
"alpha_frac": 0.6379076087,
"autogenerated": false,
"ratio": 3.8839050131926123,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5021812621892612,
"avg_score": null,
"num_lines": null
} |
from freight.models import TaskStatus
from freight.notifiers import NotifierEvent, NotificationQueue
from freight.testutils import TestCase
class NotificationQueueTest(TestCase):
    """Exercises NotificationQueue put/get round-tripping."""

    def setUp(self):
        self.user = self.create_user()
        self.repo = self.create_repo()
        self.app = self.create_app(repository=self.repo)
        self.deploy_config = self.create_taskconfig(app=self.app)
        self.task = self.create_task(
            app=self.app, user=self.user, status=TaskStatus.finished
        )

    def test_simple(self):
        # delay=0 makes the queued item available immediately.
        queue = NotificationQueue(delay=0)
        queue.put(
            task=self.task,
            type="dummy",
            config={"foo": "bar"},
            event=NotifierEvent.TASK_STARTED,
        )
        expected = {
            "task": str(self.task.id),
            "type": "dummy",
            "config": {"foo": "bar"},
            "event": NotifierEvent.TASK_STARTED,
        }
        assert queue.get() == expected
        # A drained queue yields None.
        assert queue.get() is None
| {
"repo_name": "getsentry/freight",
"path": "tests/notifiers/test_queue.py",
"copies": "1",
"size": "1036",
"license": "apache-2.0",
"hash": 7789812179910072000,
"line_mean": 30.3939393939,
"line_max": 68,
"alpha_frac": 0.583976834,
"autogenerated": false,
"ratio": 3.9693486590038316,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5053325493003832,
"avg_score": null,
"num_lines": null
} |
from frequency_plan import Frequency
from enum import Enum
from utils.log import logger, ConstLog
from struct import pack
class EU863_870(Frequency):
    """LoRaWAN frequency plan for the EU 863-870 MHz ISM band."""
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
        SF7BW250 = 6
        FSK = 7
    # defaults for the second receive window (RX2)
    RX2Frequency = 869.525
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    @classmethod
    def rx1_freq(cls, freq_up):
        # RX1 downlink reuses the uplink frequency in this plan.
        return freq_up
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-7)
        :param dr_offset: int, RX1 data-rate offset (0-5)
        :return: DataRate member, e.g. DataRate.SF7BW250
        """
        assert 0 <= dr_up <= 7
        assert 0 <= dr_offset <= 5
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    @classmethod
    def b_freq(cls):
        # beacon frequency (MHz)
        return cls.BEACON_FREQ
    BEACON_FREQ = 869.525
    class Channel:
        """
        Default Freq Ch1 868.1
        Default Freq Ch2 868.3
        Default Freq Ch3 868.5
        Ch4 Freq 867.1 * (10 ** 4) 8671000
        Ch5 Freq 867.3 8673000
        Ch6 Freq 867.5 8675000
        Ch7 Freq 867.7 8677000
        Ch8 Freq 867.9 8679000
        Ch9 lora-std Freq 868.3 SF7BW250
        """
        Ch4 = 8671000
        Ch5 = 8673000
        Ch6 = 8675000
        Ch7 = 8677000
        Ch8 = 8679000
        # CFList payload: five 3-byte little-endian extra-channel
        # frequencies followed by a zero padding byte
        CF_LIST = Ch4.to_bytes(3, 'little') + Ch5.to_bytes(3, 'little') + \
                  Ch6.to_bytes(3, 'little') + Ch7.to_bytes(3, 'little') + \
                  Ch8.to_bytes(3, 'little') + bytes([0])
        CH_MASK = b'\xff\x01'  # Ch1-9 open
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    class TXPower(Enum):
        """TX power index -> output power in dBm ('default' aliases index 1)."""
        dBm20 = 0
        dBm14 = 1
        dBm11 = 2
        dBm8 = 3
        dBm5 = 4
        dBm2 = 5
        default = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
        DataRate.SF7BW250: 230,
        DataRate.FSK: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index (see the rssi
        # table at the bottom of this module).  In the narrow boundary bands
        # the recently-used data rate is kept to avoid oscillation.
        if rssi > -47:
            return 6
        elif -50 < rssi <= -47:
            if recent_datr == 5:
                return recent_datr
            else:
                return 6
        elif -57 < rssi <= -50:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            return 0
        # elif rssi <= -110:
        #     return 0
        # else:
        #     logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
"""
rssi range from Rossi:
'SF12BW125': -110~,
'SF11BW125': -90~-110,
'SF10BW125': -80~-90,
'SF9BW125': -70~-80,
'SF8BW125': -60~-70,
    'SF7BW125': -50~-60,
'SF7BW250': 0~-50,
'FSK': ,
""" | {
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/EU863_870.py",
"copies": "1",
"size": "3993",
"license": "mit",
"hash": -2564942947560669700,
"line_mean": 23.9625,
"line_max": 89,
"alpha_frac": 0.4678186827,
"autogenerated": false,
"ratio": 3.254278728606357,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4222097411306357,
"avg_score": null,
"num_lines": null
} |
from frequency_plan import Frequency
from enum import Enum
from utils.log import logger, ConstLog
class CP500(Frequency):
    """LoRaWAN frequency plan for a 500 MHz band (CP500)."""
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
        SF7BW250 = 6
        FSK = 7
    # defaults for the second receive window (RX2)
    RX2Frequency = 501.7
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    @classmethod
    def rx1_freq(cls, freq_up):
        # RX1 downlink reuses the uplink frequency in this plan.
        return freq_up
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-7)
        :param dr_offset: int, RX1 data-rate offset (0-5)
        :return: DataRate member, e.g. DataRate.SF7BW250
        """
        assert 0 <= dr_up <= 7
        assert 0 <= dr_offset <= 5
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    @classmethod
    def b_freq(cls):
        # beacon frequency (MHz)
        return cls.BEACON_FREQ
    BEACON_FREQ = 501.7
    class Channel:
        """
        Default Freq Ch1 500.3
        Default Freq Ch2 500.5
        Default Freq Ch3 500.7
        Ch4 Freq 500.9 * (10 ** 4) 5009000
        Ch5 Freq 501.1 5011000
        Ch6 Freq 501.3 5013000
        Ch7 Freq 501.5 5015000
        Ch8 Freq 501.7 5017000
        """
        Ch4 = 5009000
        Ch5 = 5011000
        Ch6 = 5013000
        Ch7 = 5015000
        Ch8 = 5017000
        # CFList payload: five 3-byte little-endian extra-channel
        # frequencies followed by a zero padding byte
        CF_LIST = Ch4.to_bytes(3, 'little') + Ch5.to_bytes(3, 'little') + \
                  Ch6.to_bytes(3, 'little') + Ch7.to_bytes(3, 'little') + \
                  Ch8.to_bytes(3, 'little') + bytes([0])
        CH_MASK = b'\xff\x00'  # Ch1-8 open
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    class TXPower(Enum):
        """TX power index -> output power in dBm ('default' aliases index 1)."""
        dBm20 = 0
        dBm14 = 1
        dBm11 = 2
        dBm8 = 3
        dBm5 = 4
        dBm2 = 5
        default = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
        DataRate.SF7BW250: 230,
        DataRate.FSK: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index.  In the
        # narrow boundary bands the recently-used data rate is kept to
        # avoid oscillation.
        if rssi > -47:
            return 6
        elif -50 < rssi <= -47:
            if recent_datr == 5:
                return recent_datr
            else:
                return 6
        elif -57 < rssi <= -50:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            # FIX: was 'elif rssi:', which would fall through (returning
            # None) for a falsy rssi and was inconsistent with the sibling
            # frequency plans; rssi <= -110 always maps to data rate 0.
            return 0
"""
rssi range from Rossi:
'SF12BW125': -110~,
'SF11BW125': -90~-110,
'SF10BW125': -80~-90,
'SF9BW125': -70~-80,
'SF8BW125': -60~-70,
    'SF7BW125': -50~-60,
'SF7BW250': 0~-50,
'FSK': ,
""" | {
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/CP500.py",
"copies": "1",
"size": "3914",
"license": "mit",
"hash": -5836978879399225000,
"line_mean": 23.9363057325,
"line_max": 89,
"alpha_frac": 0.4667858968,
"autogenerated": false,
"ratio": 3.2698412698412698,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42366271666412697,
"avg_score": null,
"num_lines": null
} |
from frequency_plan import Frequency
from enum import Enum
from utils.log import logger, ConstLog
class EU433(Frequency):
    """LoRaWAN frequency plan for the EU 433 MHz band."""
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
        SF7BW250 = 6
        FSK = 7
    # defaults for the second receive window (RX2)
    RX2Frequency = 434.665
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    @classmethod
    def rx1_freq(cls, freq_up):
        # RX1 downlink reuses the uplink frequency in this plan.
        return freq_up
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-7)
        :param dr_offset: int, RX1 data-rate offset (0-5)
        :return: DataRate member, e.g. DataRate.SF7BW250
        """
        assert 0 <= dr_up <= 7
        assert 0 <= dr_offset <= 5
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    class Channel:
        """
        Default Ch1 Freq 433.175
        Default Ch2 Freq 433.375
        Default Ch3 Freq 433.575
        Ch4 Freq 433.775 * (10 ** 4) 4337750
        Ch5 Freq 433.975 4339750
        Ch6 disabled
        Ch7 disabled
        Ch8 disabled
        Ch9 lora-std Freq 434.175 SF7BW250
        """
        Ch4 = 4337750
        Ch5 = 4339750
        Ch6 = 0
        Ch7 = 0
        Ch8 = 0
        # CFList payload: 3-byte little-endian channel frequencies
        # (disabled channels encoded as 0) plus a zero padding byte
        CF_LIST = Ch4.to_bytes(3, 'little') + Ch5.to_bytes(3, 'little') + \
                  Ch6.to_bytes(3, 'little') + Ch7.to_bytes(3, 'little') + \
                  Ch8.to_bytes(3, 'little') + bytes([0])
        CH_MASK = b'\x07\x00'  # mask value opens Ch1-3 only; NOTE(review): an older comment said "Ch1-5 and Ch9 open" -- confirm which is intended
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    class TXPower(Enum):
        """TX power index -> output power in dBm (N prefix = negative)."""
        dBm10 = 0
        dBm7 = 1
        dBm4 = 2
        dBm1 = 3
        dBmN2 = 4
        dBmN5 = 5
        default = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
        DataRate.SF7BW250: 230,
        DataRate.FSK: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index.  In the
        # narrow boundary bands the recently-used data rate is kept to
        # avoid oscillation.
        if rssi > -47:
            return 6
        elif -50 < rssi <= -47:
            if recent_datr == 5:
                return recent_datr
            else:
                return 6
        elif -57 < rssi <= -50:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            return 0
        # elif rssi <= -110:
        #     return 0
        # else:
        #     logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/EU433.py",
"copies": "1",
"size": "3599",
"license": "mit",
"hash": 6817399547284538000,
"line_mean": 24.5319148936,
"line_max": 89,
"alpha_frac": 0.462350653,
"autogenerated": false,
"ratio": 3.373008434864105,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9333473496924448,
"avg_score": 0.00037711818793137913,
"num_lines": 141
} |
from frequency_plan import Frequency
from enum import Enum
from utils.log import logger, ConstLog
class MT433(Frequency):
    """LoRaWAN frequency plan for a 433 MHz band variant (MT433)."""
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
        SF7BW250 = 6
        FSK = 7
    # defaults for the second receive window (RX2)
    RX2Frequency = 434.665
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    @classmethod
    def rx1_freq(cls, freq_up):
        # RX1 downlink reuses the uplink frequency in this plan.
        return freq_up
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-7)
        :param dr_offset: int, RX1 data-rate offset (0-5)
        :return: DataRate member, e.g. DataRate.SF7BW250
        """
        assert 0 <= dr_up <= 7
        assert 0 <= dr_offset <= 5
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    class Channel:
        """
        Default Ch1 Freq 433.3
        Default Ch2 Freq 433.5
        Default Ch3 Freq 433.7
        Ch4 Freq 433.9 * (10 ** 4) 4339000
        Ch5 Freq 434.0 4340000
        Ch6 Freq 434.2 4342000
        Ch7 Freq 434.4 4344000
        Ch8 Freq 434.6 4346000
        """
        Ch4 = 4339000
        Ch5 = 4340000
        Ch6 = 4342000
        Ch7 = 4344000
        Ch8 = 4346000
        # CFList payload: five 3-byte little-endian extra-channel
        # frequencies followed by a zero padding byte
        CF_LIST = Ch4.to_bytes(3, 'little') + Ch5.to_bytes(3, 'little') + \
                  Ch6.to_bytes(3, 'little') + Ch7.to_bytes(3, 'little') + \
                  Ch8.to_bytes(3, 'little') + bytes([0])
        CH_MASK = b'\xff\x00'  # Ch1-8 open
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    class TXPower(Enum):
        """TX power index -> output power in dBm (N prefix = negative)."""
        dBm10 = 0
        dBm7 = 1
        dBm4 = 2
        dBm1 = 3
        dBmN2 = 4
        dBmN5 = 5
        default = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
        DataRate.SF7BW250: 230,
        DataRate.FSK: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index.  In the
        # narrow boundary bands the recently-used data rate is kept to
        # avoid oscillation.
        if rssi > -47:
            return 6
        elif -50 < rssi <= -47:
            if recent_datr == 5:
                return recent_datr
            else:
                return 6
        elif -57 < rssi <= -50:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            return 0
        # elif rssi <= -110:
        #     return 0
        # else:
        #     logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/MT433.py",
"copies": "1",
"size": "3578",
"license": "mit",
"hash": 4832990538746725000,
"line_mean": 24.5642857143,
"line_max": 89,
"alpha_frac": 0.46254891,
"autogenerated": false,
"ratio": 3.350187265917603,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4312736175917603,
"avg_score": null,
"num_lines": null
} |
from frequency_plan import Frequency
from enum import Enum
class CN470_510(Frequency):
    """LoRaWAN frequency plan for the CN 470-510 MHz band.

    Uplinks occupy 470.3-489.3 MHz; the RX1 downlink is derived by folding
    the uplink channel into 48 downlink channels starting at 500.3 MHz.
    """
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
    # defaults for the second receive window (RX2)
    RX2Frequency = 505.3
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    class TXPower(Enum):
        """TX power index -> output power in dBm ('default' aliases index 7)."""
        dBm17 = 0
        dBm16 = 1
        dBm14 = 2
        dBm12 = 3
        dBm10 = 4
        dBm7 = 5
        dBm5 = 6
        dBm2 = 7
        default = 7
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-5)
        :param dr_offset: int, RX1 data-rate offset (0-3)
        :return: DataRate
        """
        assert 0 <= dr_up <= 5
        assert 0 <= dr_offset <= 3
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    @classmethod
    def rx1_freq(cls, freq_up):
        """
        Map an uplink frequency to its RX1 downlink frequency.
        :param freq_up: float (MHz)
        :return: float (MHz)
        """
        chan_up = cls.get_channel_up_by_freq(freq_up)
        # downlink channel is the uplink channel folded modulo 48
        chan_dn = chan_up % 48
        freq_dn = cls.get_freq_dn_by_channel(chan_dn)
        return freq_dn
    @staticmethod
    def get_freq_dn_by_channel(channel):
        """
        :param channel: int (0-47)
        :return: float (MHz), i.e. 500.3 + 0.2 * channel
        """
        assert 0 <= channel <= 47
        # computed in tenths of a MHz to avoid float rounding artifacts
        return 500 + (3 + 2 * channel)/10
    @staticmethod
    def get_channel_up_by_freq(frequency):
        """
        :param frequency: float (MHz), must be within the uplink band
        :return: int, nearest uplink channel index
        """
        assert 470.3 <= frequency <= 489.3, 'CN470_510 Frequency Plan got Frequency: %s'%frequency
        channel = (frequency - 470.3) / 0.2
        # round to the nearest integer channel
        decimal = channel % 1
        if decimal >= 0.5:
            channel = int(channel) + 1
        else:
            channel = int(channel)
        return int(channel)
    class Channel:
        """
        Ch1 470.3
        Ch2 470.5
        Ch3 470.7
        Ch4 470.9
        Ch5 471.1
        Ch6 471.3
        Ch7 471.5
        Ch8 471.7
        """
        CF_LIST = b''
        CH_MASK = b'\xff\x00'  # Ch1-8 open
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index.  In the
        # narrow boundary bands the recently-used data rate is kept to
        # avoid oscillation.
        if -57 < rssi:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            # FIX: this band was written as "-100 < rssi <= -107", which is
            # always False, so RSSI in (-110, -107] wrongly fell through to
            # data rate 0.  Matches the other frequency plans now.
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            return 0
        # else:
        #     logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
# else:
# logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
# Manual smoke test: uplink 470.9 MHz is channel 3, so the expected
# downlink is 500.9 MHz.
if __name__ == '__main__':
    print(CN470_510.rx1_freq(470.9))
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/CN470_510.py",
"copies": "1",
"size": "3877",
"license": "mit",
"hash": 8133878657188914000,
"line_mean": 23.5443037975,
"line_max": 98,
"alpha_frac": 0.4650502966,
"autogenerated": false,
"ratio": 3.424911660777385,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4389961957377385,
"avg_score": null,
"num_lines": null
} |
from frequency_plan import Frequency
from enum import Enum
class CNICG470(Frequency):
    """LoRaWAN frequency plan for a 470 MHz band variant (CNICG470).

    Unlike CN470_510, the RX1 downlink reuses the uplink frequency.
    """
    JOIN_ACCEPT_DELAY = 5
    MAX_FCNT_GAP = 16384
    ADR_ACK_LIMIT = 64
    ADR_ACK_DELAY = 32
    ACK_TIMEOUT = 2  # 2 +/-1 s random delay between 1 and 3 seconds
    RF_CH = 0
    class DataRate(Enum):
        """LoRaWAN data-rate index -> spreading factor / bandwidth."""
        SF12BW125 = 0
        SF11BW125 = 1
        SF10BW125 = 2
        SF9BW125 = 3
        SF8BW125 = 4
        SF7BW125 = 5
    # defaults for the second receive window (RX2)
    RX2Frequency = 470.3
    RX2DataRate = 0
    RX1DRoffset = 0
    RxDelay = 1
    @classmethod
    def b_freq(cls):
        # beacon frequency (MHz)
        return cls.BEACON_FREQ
    BEACON_FREQ = 470.3
    class TXPower(Enum):
        """TX power index -> output power in dBm ('default' aliases index 7)."""
        dBm17 = 0
        dBm16 = 1
        dBm14 = 2
        dBm12 = 3
        dBm10 = 4
        dBm7 = 5
        dBm5 = 6
        dBm2 = 7
        default = 7
    @classmethod
    def rx1_datr(cls, dr_up, dr_offset):
        """
        Compute the RX1 downlink data rate (uplink rate minus offset,
        clamped at 0).
        :param dr_up: int, uplink data-rate index (0-5)
        :param dr_offset: int, RX1 data-rate offset (0-3)
        :return: DataRate
        """
        assert 0 <= dr_up <= 5
        assert 0 <= dr_offset <= 3
        dr_dn = dr_up - dr_offset
        if dr_dn < 0:
            dr_dn = 0
        return cls.DataRate(dr_dn)
    @classmethod
    def rx1_freq(cls, freq_up):
        """
        RX1 downlink reuses the uplink frequency in this plan.
        :param freq_up: float (MHz)
        :return: float (MHz)
        """
        return freq_up
    class Channel:
        """
        Ch1 470.3
        Ch2 470.5
        Ch3 470.7
        Ch4 470.9
        Ch5 471.1
        Ch6 471.3
        Ch7 471.5
        Ch8 471.7
        """
        CF_LIST = b''
        CH_MASK = b'\xff\x00'  # Ch1-8 open
        CH_MASK_CNTL = 0
        NB_TRANS = 1
    # maximum payload size (bytes) per data rate
    MAX_LENGTH = {
        DataRate.SF12BW125: 59,
        DataRate.SF11BW125: 59,
        DataRate.SF10BW125: 59,
        DataRate.SF9BW125: 123,
        DataRate.SF8BW125: 230,
        DataRate.SF7BW125: 230,
    }
    @staticmethod
    def adr_schema(rssi, recent_datr):
        # Map an RSSI measurement to a target data-rate index.  In the
        # narrow boundary bands the recently-used data rate is kept to
        # avoid oscillation.
        if -57 < rssi:
            return 5
        elif -60 < rssi <= -57:
            if recent_datr == 4:
                return recent_datr
            else:
                return 5
        elif -67 < rssi <= -60:
            return 4
        elif -70 < rssi <= -67:
            if recent_datr == 3:
                return recent_datr
            else:
                return 4
        elif -77 < rssi <= -70:
            return 3
        elif -80 < rssi <= -77:
            if recent_datr == 2:
                return recent_datr
            else:
                return 3
        elif -87 < rssi <= -80:
            return 2
        elif -90 < rssi <= -87:
            if recent_datr == 1:
                return recent_datr
            else:
                return 2
        elif -107 < rssi <= -90:
            return 1
        elif -110 < rssi <= -107:
            # FIX: this band was written as "-100 < rssi <= -107", which is
            # always False, so RSSI in (-110, -107] wrongly fell through to
            # data rate 0.  Matches the other frequency plans now.
            if recent_datr == 0:
                return recent_datr
            else:
                return 1
        else:
            return 0
        # else:
        #     logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
# else:
# logger.error(ConstLog.adr + 'rssi %s recent_datr %s' % (rssi, recent_datr))
# Manual smoke test; RX1 reuses the uplink frequency in this plan.
if __name__ == '__main__':
    # FIX: this module defines CNICG470; referencing CN470_510 here raised
    # a NameError (copy-paste from CN470_510.py).
    print(CNICG470.rx1_freq(470.9))
"repo_name": "soybean217/lora-python",
"path": "GServer/frequency_plan/CNICG470.py",
"copies": "1",
"size": "3055",
"license": "mit",
"hash": -8215020838560402000,
"line_mean": 21.8059701493,
"line_max": 89,
"alpha_frac": 0.4533551555,
"autogenerated": false,
"ratio": 3.3645374449339207,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9315059543496386,
"avg_score": 0.0005666113875069099,
"num_lines": 134
} |
from .frequencyscale import LinearScale, FrequencyBand, ExplicitScale
from .tfrepresentation import FrequencyDimension
from .frequencyadaptive import FrequencyAdaptive
from zounds.timeseries import \
audio_sample_rate, TimeSlice, Seconds, TimeDimension, HalfLapped, \
Milliseconds, SampleRate
from zounds.core import ArrayWithUnits, IdentityDimension
from .sliding_window import \
IdentityWindowingFunc, HanningWindowingFunc, WindowingFunc, \
OggVorbisWindowingFunc
from zounds.loudness import log_modulus, unit_scale
import numpy as np
from scipy.signal import resample, firwin2
from matplotlib import cm
from scipy.signal import hann, morlet
from itertools import repeat
from zounds.nputil import sliding_window
def fft(x, axis=-1, padding_samples=0):
    """
    Apply an FFT along the given dimension, and with the specified amount of
    zero-padding
    Args:
        x (ArrayWithUnits): an :class:`~zounds.core.ArrayWithUnits` instance
            which has one or more :class:`~zounds.timeseries.TimeDimension`
            axes
        axis (int): The axis along which the fft should be applied
        padding_samples (int): The number of padding zeros to apply along
            axis before performing the FFT
    """
    if padding_samples > 0:
        # NOTE(review): the padding block assumes x is 2d (len(x) rows) --
        # confirm against callers
        zeros = np.zeros((len(x), padding_samples), dtype=x.dtype)
        padded = np.concatenate([x, zeros], axis=axis)
    else:
        padded = x
    coeffs = np.fft.rfft(padded, axis=axis, norm='ortho')
    # recover the audio sample rate from the time dimension being transformed
    samples_per_second = int(Seconds(1) / x.dimensions[axis].frequency)
    sr = audio_sample_rate(samples_per_second)
    scale = LinearScale.from_sample_rate(sr, coeffs.shape[-1])
    dims = list(x.dimensions)
    dims[axis] = FrequencyDimension(scale)
    return ArrayWithUnits(coeffs, dims)
def stft(x, window_sample_rate=HalfLapped(), window=HanningWindowingFunc()):
    """Short-time Fourier transform of ``x``.

    Accepts either a single-time-dimension signal or a batch shaped
    (IdentityDimension, TimeDimension); returns the windowed FFT frames.
    NOTE(review): the default arguments are instantiated once at import
    time and shared across calls -- fine only if they are immutable.
    """
    duration = TimeSlice(window_sample_rate.duration)
    frequency = TimeSlice(window_sample_rate.frequency)
    if x.ndim == 1:
        # pad so trailing samples that don't fill a frame are kept
        _, arr = x.sliding_window_with_leftovers(
            duration, frequency, dopad=True)
    elif x.ndim == 2 and isinstance(x.dimensions[0], IdentityDimension):
        arr = x.sliding_window((1, duration), (1, frequency))
        td = x.dimensions[-1]
        dims = [IdentityDimension(), TimeDimension(*window_sample_rate), td]
        # restore the (batch, frames, samples) layout with unit metadata
        arr = ArrayWithUnits(arr.reshape((len(x), -1, arr.shape[-1])), dims)
    else:
        raise ValueError(
            'x must either have a single TimeDimension, or '
            '(IdentityDimension, TimeDimension)')
    window = window or IdentityWindowingFunc()
    windowed = arr * window._wdata(arr.shape[-1])
    return fft(windowed)
def mdct(data):
    """Modified discrete cosine transform along the last axis.

    The last axis of ``data`` holds ``2 * half`` samples; ``half`` real
    MDCT coefficients are returned, computed via a twiddled FFT.
    """
    half = data.shape[-1] // 2
    sample_idx = np.arange(0, 2 * half)
    coeff_idx = np.arange(0, half)
    omega = -1j * np.pi
    # pre-twiddle, transform, and keep the first half of the spectrum
    pre_twiddled = data * np.exp(omega * sample_idx / 2 / half)
    spectrum = np.fft.fft(pre_twiddled)[..., :half]
    post_twiddle = np.exp(omega * (coeff_idx + 0.5) * (half + 1) / 2 / half)
    return np.sqrt(2 / half) * np.real(spectrum * post_twiddle)
def imdct(frames):
    """Inverse MDCT: expand ``half`` coefficients per frame back to
    ``2 * half`` time-domain samples via a twiddled FFT."""
    half = frames.shape[-1]
    sample_idx = np.arange(0, 2 * half)
    coeff_idx = np.arange(0, half)
    omega = -1j * np.pi
    pre_twiddled = frames * np.exp(omega * (coeff_idx + 0.5) * (half + 1) / 2 / half)
    spectrum = np.fft.fft(pre_twiddled, 2 * half)
    post_twiddle = np.exp(omega * sample_idx / 2 / half)
    return np.sqrt(2 / half) * np.real(spectrum * post_twiddle)
def time_stretch(x, factor, frame_sample_rate=None):
    """Stretch audio in time by ``factor`` without changing pitch, using a
    phase vocoder over an STFT.

    Args:
        x (ArrayWithUnits): audio; either a single time dimension or a
            (IdentityDimension, TimeDimension) batch
        factor (float): > 1 shortens the output, < 1 lengthens it
        frame_sample_rate (SampleRate): optional STFT frame rate; defaults
            to HalfLapped() with its hop halved (quarter-lapped)

    Returns:
        ArrayWithUnits of shape (batch, int(n_samples / factor))
    """
    if frame_sample_rate is None:
        sr = HalfLapped()
        sr = SampleRate(frequency=sr.frequency / 2, duration=sr.duration)
    else:
        sr = frame_sample_rate
    hop_length, window_length = sr.discrete_samples(x)
    win = WindowingFunc(windowing_func=hann)
    # to simplify, let's always compute the stft in "batch" mode
    if x.ndim == 1:
        x = x.reshape((1,) + x.shape)
    D = stft(x, sr, win)
    n_fft_coeffs = D.shape[-1]
    n_frames = D.shape[1]
    n_batches = D.shape[0]
    # fractional frame positions in the original signal.
    # FIX: np.float was removed in NumPy 1.24; use the builtin float dtype.
    time_steps = np.arange(0, n_frames, factor, dtype=float)
    # linear-interpolation weights between adjacent frames
    weights = np.mod(time_steps, 1.0)
    exp_phase_advance = np.linspace(0, np.pi * hop_length, n_fft_coeffs)
    # pad in the time dimension, so no edge/end frames are left out
    shape = list(D.shape)
    shape[1] += 2
    coeffs = np.zeros(shape, dtype=D.dtype)
    coeffs[:, :-2, :] = D
    coeffs_mags = np.abs(coeffs)
    coeffs_phases = np.angle(coeffs)
    # we need a phase accumulator for every item in the batch
    phase_accum = coeffs_phases[:, :1, :]
    # index pairs (frame, frame + 1) for each interpolated output frame
    sliding_indices = np.vstack([time_steps, time_steps + 1]).T.astype(np.int32)
    windowed_mags = coeffs_mags[:, sliding_indices, :]
    windowed_phases = coeffs_phases[:, sliding_indices, :]
    first_mags = windowed_mags[:, :, 0, :]
    second_mags = windowed_mags[:, :, 1, :]
    first_phases = windowed_phases[:, :, 0, :]
    second_phases = windowed_phases[:, :, 1, :]
    # compute the per-bin phase increment, wrapped to (-pi, pi]
    two_pi = 2.0 * np.pi
    dphase = (second_phases - first_phases - exp_phase_advance)
    dphase -= two_pi * np.round(dphase / two_pi)
    dphase += exp_phase_advance
    # accumulate phase across output frames
    all_phases = np.concatenate([phase_accum, dphase], axis=1)
    dphase = np.cumsum(all_phases, axis=1, out=all_phases)
    dphase = dphase[:, :-1, :]
    # linear interpolation of FFT coefficient magnitudes
    weights = weights[None, :, None]
    mags = ((1.0 - weights) * first_mags) + (weights * second_mags)
    # combine magnitudes and phases
    new_coeffs = mags * np.exp(1.j * dphase)
    # synthesize the new frames
    new_frames = np.fft.irfft(new_coeffs, axis=-1, norm='ortho')
    new_frames = np.multiply(
        new_frames, win._wdata(new_frames.shape[-1]), out=new_frames)
    # overlap-add the new audio samples
    new_n_samples = int(x.shape[-1] / factor)
    output = np.zeros((n_batches, new_n_samples), dtype=x.dtype)
    for i in range(new_frames.shape[1]):
        start = i * hop_length
        stop = start + new_frames.shape[-1]
        l = output[:, start: stop].shape[1]
        output[:, start: stop] += new_frames[:, i, :l]
    return ArrayWithUnits(output, [IdentityDimension(), x.dimensions[-1]])
def pitch_shift(x, semitones, frame_sample_rate=None):
    """Shift the pitch of ``x`` by ``semitones`` while keeping its duration,
    by time-stretching and then resampling back to the original length."""
    original_shape = x.shape[1] if x.ndim == 2 else x.shape[0]
    # first, perform a time stretch so that the audio will have the desired
    # pitch
    factor = 2.0 ** (-float(semitones) / 12.0)
    stretched = time_stretch(x, factor, frame_sample_rate=frame_sample_rate)
    # hang on to original dimensions
    dimensions = stretched.dimensions
    # window the audio using a power-of-2 frame size for more efficient FFT
    # computations
    batch_size = stretched.shape[0]
    window_size = 1024
    step = (1, window_size)
    new_window_shape = int(window_size * factor)
    padding = window_size - int(stretched.shape[-1] % window_size)
    stretched = np.pad(stretched, ((0, 0), (0, padding)), mode='constant')
    windowed = sliding_window(stretched, step, step, flatten=False).squeeze()
    # resample the audio so that it has the correct duration
    rs = resample(windowed, new_window_shape, axis=-1)
    # flatten out the windowed, resampled audio
    rs = rs.reshape(batch_size, -1)
    # slice the audio to remove residual zeros resulting from our power-of-2
    # zero padding above
    rs = rs[:, :original_shape]
    return ArrayWithUnits(rs, dimensions)
def phase_shift(coeffs, samplerate, time_shift, axis=-1, frequency_band=None):
    """Apply a time shift to frequency-domain coefficients by multiplying
    a complex exponential into the frequency axis (the FFT shift theorem),
    optionally restricted to ``frequency_band``.

    Raises:
        ValueError: if ``axis`` of ``coeffs`` is not a FrequencyDimension
    """
    frequency_dim = coeffs.dimensions[axis]
    if not isinstance(frequency_dim, FrequencyDimension):
        raise ValueError(
            'dimension {axis} of coeffs must be a FrequencyDimension instance, '
            'but was {cls}'.format(axis=axis, cls=frequency_dim.__class__))
    n_coeffs = coeffs.shape[axis]
    # shift expressed in whole samples at the given sample rate
    shift_samples = int(time_shift / samplerate.frequency)
    shift = (np.arange(0, n_coeffs) * 2j * np.pi) / n_coeffs
    shift = np.exp(-shift * shift_samples)
    shift = ArrayWithUnits(shift, [frequency_dim])
    frequency_band = frequency_band or slice(None)
    # operate on a copy so the caller's coefficients are untouched
    new_coeffs = coeffs.copy()
    if coeffs.ndim == 1:
        new_coeffs[frequency_band] *= shift[frequency_band]
        return new_coeffs
    # multi-dimensional case: build a full-slice index with the band on axis
    slices = [slice(None) for _ in range(coeffs.ndim)]
    slices[axis] = frequency_band
    new_coeffs[tuple(slices)] *= shift[frequency_band]
    return new_coeffs
def apply_scale(short_time_fft, scale, window=None):
    """Map STFT magnitudes onto the frequency bands of ``scale``, returning
    a spectrogram whose last dimension carries that scale."""
    magnitudes = np.abs(short_time_fft.real)
    spectrogram = scale.apply(magnitudes, window)
    new_dims = short_time_fft.dimensions[:-1] + (FrequencyDimension(scale),)
    return ArrayWithUnits(spectrogram, new_dims)
def rainbowgram(time_frequency_repr, colormap=cm.rainbow):
    """Render a complex time-frequency representation as RGB: color from the
    phase derivative, brightness from the log-scaled magnitude."""
    # magnitudes on a log scale, shifted/scaled to the unit interval
    brightness = unit_scale(log_modulus(np.abs(time_frequency_repr.real) * 1000))
    # instantaneous-frequency-like quantity from the unwrapped phase
    phase = np.angle(time_frequency_repr)
    phase = np.unwrap(phase, axis=0)
    phase = np.gradient(phase)[0]
    phase = unit_scale(phase)
    rgba = colormap(phase)
    rgba *= brightness[..., None]
    # exclude the alpha channel, if there is one
    rgb = rgba[..., :3]
    return ArrayWithUnits(
        rgb, time_frequency_repr.dimensions + (IdentityDimension(),))
def fir_filter_bank(scale, taps, samplerate, window):
    """Build a bank of FIR band-pass filters, one per band of ``scale``,
    each designed with ``firwin2`` from the supplied gain window.

    NOTE(review): a 1-d ``window`` is reused for every band via
    itertools.repeat; otherwise it is assumed to be an iterable of
    per-band windows -- confirm against callers.
    """
    basis = np.zeros((len(scale), taps))
    basis = ArrayWithUnits(basis, [
        FrequencyDimension(scale),
        TimeDimension(*samplerate)])
    nyq = samplerate.nyquist
    if window.ndim == 1:
        window = repeat(window, len(scale))
    for i, band, win in zip(range(len(scale)), scale, window):
        # clamp the band edges to the representable [0, nyquist] range
        start_hz = max(0, band.start_hz)
        stop_hz = min(nyq, band.stop_hz)
        # frequencies normalized to the nyquist frequency, with explicit
        # zero-gain endpoints at DC and nyquist as firwin2 requires
        freqs = np.linspace(
            start_hz / nyq, stop_hz / nyq, len(win), endpoint=False)
        freqs = [0] + list(freqs) + [1]
        gains = [0] + list(win) + [0]
        basis[i] = firwin2(taps, freqs, gains)
    return basis
def morlet_filter_bank(
        samplerate,
        kernel_size,
        scale,
        scaling_factor,
        normalize=True):
    """
    Create a :class:`~zounds.core.ArrayWithUnits` instance with a
    :class:`~zounds.timeseries.TimeDimension` and a
    :class:`~zounds.spectral.FrequencyDimension` representing a bank of morlet
    wavelets centered on the sub-bands of the scale.
    Args:
        samplerate (SampleRate): the samplerate of the input signal
        kernel_size (int): the length in samples of each filter
        scale (FrequencyScale): a scale whose center frequencies determine the
            fundamental frequency of each filter
        scaling_factor (int or list of int): Scaling factors for each band,
            which determine the time-frequency resolution tradeoff.
            The number(s) should fall between 0 and 1, with smaller numbers
            achieving better frequency resolution, and larger numbers better
            time resolution
        normalize (bool): When true, ensure that each filter in the bank
            has unit norm
    See Also:
        :class:`~zounds.spectral.FrequencyScale`
        :class:`~zounds.timeseries.SampleRate`
    """
    basis_size = len(scale)
    basis = np.zeros((basis_size, kernel_size), dtype=np.complex128)
    # accept either a scalar scaling factor (broadcast to all bands) or a
    # per-band sequence
    try:
        if len(scaling_factor) != len(scale):
            raise ValueError('scaling factor must have same length as scale')
    except TypeError:
        scaling_factor = np.repeat(float(scaling_factor), len(scale))
    sr = int(samplerate)
    for i, band in enumerate(scale):
        scaling = scaling_factor[i]
        # convert the band's center frequency into scipy.signal.morlet's
        # dimensionless omega parameter
        w = band.center_frequency / (scaling * 2 * sr / kernel_size)
        basis[i] = morlet(
            M=kernel_size,
            w=w,
            s=scaling)
    # only the real part of the complex wavelets is kept
    basis = basis.real
    if normalize:
        # epsilon guards against division by zero for all-zero filters
        basis /= np.linalg.norm(basis, axis=-1, keepdims=True) + 1e-8
    basis = ArrayWithUnits(
        basis, [FrequencyDimension(scale), TimeDimension(*samplerate)])
    return basis
def auto_correlogram(x, filter_bank, correlation_window=Milliseconds(30)):
    """Compute a windowed auto-correlogram of ``x``: the signal is filtered
    by each filter in ``filter_bank``, then an FFT-based autocorrelation is
    computed over sliding windows of ``correlation_window`` duration.

    Returns a complex array with the zero lag rotated to the middle of the
    lag axis (axis 2).
    """
    n_filters = filter_bank.shape[0]
    filter_size = filter_bank.shape[1]
    corr_win_samples = int(correlation_window / x.samplerate.frequency)
    # convolve x with every filter via a sliding dot product
    windowed = sliding_window(x, filter_size, 1, flatten=False)
    filtered = np.dot(windowed, filter_bank.T)
    # slide a correlation window over the filtered signal
    corr = sliding_window(
        filtered,
        ws=(corr_win_samples, n_filters),
        ss=(1, n_filters),
        flatten=False)
    # zero-pad to double length so the FFT correlation is linear, not circular
    padded_shape = list(corr.shape)
    padded_shape[2] = corr_win_samples * 2
    padded = np.zeros(padded_shape, dtype=np.float32)
    padded[:, :, :corr_win_samples, :] = corr
    # Wiener-Khinchin: autocorrelation = IFFT(|FFT|^2)
    coeffs = np.fft.fft(padded, axis=2, norm='ortho')
    correlated = np.fft.ifft(np.abs(coeffs) ** 2, axis=2, norm='ortho')
    # FIX: removed leftover debug print() calls and an unreachable
    # "return correlated" statement that followed this return.
    return np.concatenate([
        correlated[:, :, corr_win_samples:, :],
        correlated[:, :, :corr_win_samples, :],
    ], axis=2)
def dct_basis(size):
    """Return a ``size x size`` DCT-II basis matrix: entry (i, j) is
    cos(pi / size * i * (j + 0.5))."""
    indices = np.arange(size)
    angles = np.outer(indices, indices + 0.5) * (np.pi / size)
    return np.cos(angles)
def frequency_decomposition(x, sizes):
    """Decompose ``x`` into frequency bands by successive downsampling:
    each target ``size`` captures the band below its implied nyquist, and
    the upsampled band is subtracted before extracting the next one.

    Returns a FrequencyAdaptive wrapping one band per size, lowest first.
    """
    sizes = sorted(sizes)
    # promote a 1-d signal to a single-slice batch
    if x.ndim == 1:
        end = x.dimensions[0].end
        x = ArrayWithUnits(
            x[None, ...], [TimeDimension(end, end), x.dimensions[0]])
    original_size = x.shape[-1]
    time_dimension = x.dimensions[-1]
    samplerate = audio_sample_rate(time_dimension.samples_per_second)
    data = x.copy()
    bands = []
    frequency_bands = []
    start_hz = 0
    for size in sizes:
        if size != original_size:
            s = resample(data, size, axis=-1)
        else:
            s = data.copy()
        bands.append(s)
        # remove this band's content from the residual before the next pass
        data -= resample(s, original_size, axis=-1)
        # a band of `size` samples covers frequencies up to this fraction
        # of the nyquist frequency
        stop_hz = samplerate.nyquist * (size / original_size)
        frequency_bands.append(FrequencyBand(start_hz, stop_hz))
        start_hz = stop_hz
    scale = ExplicitScale(frequency_bands)
    return FrequencyAdaptive(bands, scale=scale, time_dimension=x.dimensions[0])
| {
"repo_name": "JohnVinyard/zounds",
"path": "zounds/spectral/functional.py",
"copies": "1",
"size": "14161",
"license": "mit",
"hash": 7143041640942476000,
"line_mean": 32.7971360382,
"line_max": 80,
"alpha_frac": 0.6375255985,
"autogenerated": false,
"ratio": 3.4197053851726635,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9555614293989148,
"avg_score": 0.0003233379367030918,
"num_lines": 419
} |
from fresh_tomatoes import open_movies_page
class Movie:
    """Store movie related information.

    Attribute names match what fresh_tomatoes.open_movies_page expects.
    """
    def __init__(self, title, poster_url, trailer_url):
        self.title = title
        # URL of the poster image shown on the page
        self.poster_image_url = poster_url
        # URL of the YouTube trailer
        self.trailer_youtube_url = trailer_url
# movies title list
movies_title = ["The Shawshank", "The Godfather", "The Godfather: Part II",
                "The Dark Knight", "Pulp Fiction", "Schindler's List"]
# movies poster url list
movies_poster = ["http://ia.media-imdb.com/images/M/MV5BODU4MjU4NjIwNl5BMl5BanBnXkFtZTgwMDU2MjEyMDE@._V1_SX214_AL_.jpg", "http://ia.media-imdb.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBnXkFtZTcwMDA5Mzg3OA@@._V1_SX214_AL_.jpg", "http://ia.media-imdb.com/images/M/MV5BNDc2NTM3MzU1Nl5BMl5BanBnXkFtZTcwMTA5Mzg3OA@@._V1_SX214_AL_.jpg",
                 "http://ia.media-imdb.com/images/M/MV5BMTMxNTMwODM0NF5BMl5BanBnXkFtZTcwODAyMTk2Mw@@._V1_SY317_CR0,0,214,317_AL_.jpg", "http://ia.media-imdb.com/images/M/MV5BMjE0ODk2NjczOV5BMl5BanBnXkFtZTYwNDQ0NDg4._V1_SY317_CR4,0,214,317_AL_.jpg", "http://ia.media-imdb.com/images/M/MV5BMzMwMTM4MDU2N15BMl5BanBnXkFtZTgwMzQ0MjMxMDE@._V1_SX214_AL_.jpg"]
# movies trailer url list
movies_trailer = ["https://www.youtube.com/watch?v=6hB3S9bIaco", "https://www.youtube.com/watch?v=sY1S34973zA", "https://www.youtube.com/watch?gl=SG&hl=en-GB&v=qJr92K_hKl0",
                  "https://www.youtube.com/watch?v=EXeTwQWrcwY", "https://www.youtube.com/watch?v=s7EdQ4FqbhY", "https://www.youtube.com/watch?v=dwfIf1WMhgc"]
# Build one Movie per (title, poster, trailer) triple. zip avoids the
# hard-coded range(0, 6) index bookkeeping and stays correct if the three
# parallel lists grow together.
movies = [Movie(title, poster, trailer)
          for title, poster, trailer
          in zip(movies_title, movies_poster, movies_trailer)]
# Generate and open the static movie-trailer page.
open_movies_page(movies)
| {
"repo_name": "li-xinyang/FSND_P1_MovieTrailerWebsite",
"path": "server.py",
"copies": "1",
"size": "1671",
"license": "mit",
"hash": 4686140828069327000,
"line_mean": 52.9032258065,
"line_max": 352,
"alpha_frac": 0.7145421903,
"autogenerated": false,
"ratio": 2.2220744680851063,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8433728140726962,
"avg_score": 0.0005777035316288867,
"num_lines": 31
} |
from ..fr_FR import Provider as CompanyProvider
class Provider(CompanyProvider):
    """Swiss (fr_CH) company provider."""

    company_suffixes = ('SA', 'Sàrl.')

    def ide(self):
        """
        Generates a IDE number (9 digits).
        http://www.bfs.admin.ch/bfs/portal/fr/index/themen/00/05/blank/03/02.html
        """
        def _checksum(digits):
            # Weighted "modulo 11" sum over the first 8 digits.
            factors = (5, 4, 3, 2, 7, 6, 5, 4)
            return sum(digit * factor for digit, factor in zip(digits, factors)) % 11

        while True:
            # create an array of first 8 elements initialized randomly
            digits = self.generator.random.sample(range(10), 8)
            # sum those 8 digits according to (part of) the "modulo 11" scheme
            sum_ = _checksum(digits)
            # determine the last digit to make it qualify the test
            control_number = 11 - sum_
            # 10 means the number is invalid per the modulo-11 rule. 11
            # (i.e. sum_ == 0) previously slipped through and was appended
            # as the two characters "11", silently corrupting the check
            # digit; reject both and retry.
            if control_number not in (10, 11):
                digits.append(control_number)
                break
        digits = ''.join([str(digit) for digit in digits])
        # finally return our random but valid IDE
        return 'CHE-' + digits[0:3] + '.'\
               + digits[3:6] + '.'\
               + digits[6:9]

    # uid: german name for ide
    uid = ide
    # idi: italian name for ide
    idi = ide
| {
"repo_name": "danhuss/faker",
"path": "faker/providers/company/fr_CH/__init__.py",
"copies": "2",
"size": "1307",
"license": "mit",
"hash": -2005712102381753000,
"line_mean": 33.3684210526,
"line_max": 81,
"alpha_frac": 0.5206738132,
"autogenerated": false,
"ratio": 3.886904761904762,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5407578575104762,
"avg_score": null,
"num_lines": null
} |
from friendly.silverpop.helpers import to_python, pep_up
from .constants import (
LIST_TYPE_DATABASE,
LIST_TYPE_QUERY,
LIST_TYPE_TEST_LIST,
LIST_TYPE_SEED_LIST,
LIST_TYPE_SUPPRESSION_LIST,
LIST_TYPE_RELATIONAL_TABLE,
LIST_TYPE_CONTACT_LIST,
CONTACT_CREATED_MANUALLY)
class Resource(object):
    """Base class for Engage API resources hydrated from response elements.

    Subclasses declare which element fields map to which Python types via
    the ``_*_keys`` class attributes; ``from_element`` feeds those
    declarations to ``to_python`` to populate a fresh instance.
    """
    _str_keys = []
    _int_keys = []
    _date_keys = []
    _bool_keys = []
    _dict_keys = []
    _object_map = {}
    # _pks = []
    @classmethod
    def from_element(cls, el, api):
        # Build an empty instance and let to_python copy/convert fields
        # from the element according to this class' key declarations.
        return to_python(
            obj=cls(),
            in_el=el,
            str_keys=cls._str_keys,
            int_keys=cls._int_keys,
            date_keys=cls._date_keys,
            bool_keys=cls._bool_keys,
            dict_keys=cls._dict_keys,
            object_map=cls._object_map,
            api=api
        )
class Session(object):
    """Lightweight handle around a Silverpop session id."""

    def __init__(self, session_id):
        self.id = session_id

    def __str__(self):
        return self.id

    def close(self):
        # No teardown needed; kept so callers can close sessions uniformly.
        pass
class Mailing(object):
    """Placeholder for a Silverpop mailing; no behaviour implemented yet."""
    pass
class Table(object):
    """In-memory description of a Silverpop table: a mapping of column id -> Column.

    Columns are keyed by ``str(column)`` (the column's normalised id), not
    by the original column name.
    """
    def __init__(self):
        # column id (str(column)) -> Column
        self._columns = {}
    @property
    def key_columns(self):
        """Returns a list of key columns (ids) of the table"""
        return [str(column) for id, column in self._columns.iteritems() if column.is_key]
    @property
    def column_names(self):
        # Despite the name, these are the normalised column ids (str(column)).
        return [str(column) for id, column in self._columns.iteritems()]
    def has_column(self, column_name):
        """Checks whether the table contains the given column
        Args:
            column_name (str): Id of the column to check for
        Returns:
            bool -- Whether the column exists or not
        """
        return column_name in self._columns
    def __getitem__(self, key):
        return self._columns[key]
    def add_column(self, column, replace=False, **kwargs):
        """Adds/replaces a column at the table
        Args:
            column (Column): Column to add
            replace (bool): Whether to replace existing columns with the same id or not
        Raises:
            ValueError
        """
        if not isinstance(column, Column):
            raise ValueError('Invalid value. Must be column')
        if not hasattr(column, 'id'):
            raise Exception('No id at column')
        # We return silently if the column already exists
        if str(column) in self._columns and replace is False:
            # raise Exception('Column "{0}" already exists'.format(str(column)))
            return
        self._columns[str(column)] = column
    def drop_column(self, column):
        # NOTE(review): for Column arguments this uses ``column.name`` while
        # the dict is keyed by ``column.id`` (pep_up-normalised); when the
        # two differ the lookup below fails — confirm intended behaviour.
        column_name = None
        if isinstance(column, Column):
            column_name = column.name
        elif isinstance(column, basestring):
            column_name = column
        else:
            raise ValueError('Invalid column type')
        if not column_name in self.column_names:
            raise Exception('No column %s' % column_name)
        # Key columns are protected from deletion.
        if self._columns[column_name].is_key:
            raise Exception('Cannot delete key columns')
        del self._columns[column_name]
class Contact(Resource):
    """A recipient record; attribute writes are validated against a Table schema.

    When constructed with ``from_table=<Table>``, any attribute assignment
    is checked against that table's columns (see ``__setattr__``).
    """
    _str_keys = ('EMAIL', 'ORGANIZATION_ID')
    _int_keys = ('RecipientId', 'EmailType', 'CreatedFrom')
    _date_keys = ('LastModified', )
    _bool_keys = []
    _dict_keys = None
    _object_map = {}
    def __init__(self, **kwargs):
        table = kwargs.get('from_table')
        # if table is None:
        #     table = Table()
        # Write through __dict__ on purpose: a plain ``self._table = table``
        # would invoke __setattr__ below, which itself reads self._table and
        # would recurse/fail before the attribute exists.
        self.__dict__['_table'] = table
    def __setattr__(self, name, value):
        # Reject fields the backing table does not know about (only when a
        # table was supplied at construction time).
        if self._table and not self._table.has_column(name):
            raise ValueError(
                'Contact has no field "{0}". Available fields: {1}'.format(name, ', '.join(self._table.column_names)))
        # @todo Validate type
        super(Contact, self).__setattr__(name, value)
class List(Resource):
    """Base class for all Silverpop list-like resources.

    ``from_element`` dispatches to the concrete subclass selected by the
    element's numeric TYPE field via LIST_TYPE_MAP (defined at module
    bottom; resolved at call time).
    """
    _str_keys = ('NAME', 'PARENT_NAME', 'USER_ID')
    _int_keys = ('ID', 'TYPE', 'SIZE', 'NUM_OPT_OUTS', 'NUM_UNDELIVERABLE', 'VISIBILITY',
                 'PARENT_FOLDER_ID', 'SUPPRESSION_LIST_ID')
    _date_keys = ('LAST_MODIFIED',)
    _bool_keys = ('IS_FOLDER', 'FLAGGED_FOR_BACKUP')
    _dict_keys = None
    _object_map = {}
    @classmethod
    def from_element(cls, el, api):
        """Build the type-specific List subclass instance for element ``el``."""
        # Renamed from ``type`` to avoid shadowing the builtin.
        list_type = int(el.find('TYPE').text)
        if list_type not in LIST_TYPE_MAP:
            # Bug fix: the original passed the format string and the value as
            # two separate Exception args, so "%d" was never interpolated.
            raise Exception("Unsupported type %d" % list_type)
        list_class = LIST_TYPE_MAP[list_type]
        # Delegate to Resource.from_element, bound to the concrete subclass.
        return super(cls, list_class).from_element(el, api)
    def add_contact(self, contact, created_from=CONTACT_CREATED_MANUALLY):
        """Add ``contact`` as a recipient of this list via the API."""
        if not isinstance(contact, Contact):
            raise ValueError('Invalid contact')
        return self.api.add_recipient(self.id, created_from, contact)
    def get_recipient_data(self, contact):
        """Fetch the stored recipient data for ``contact`` (looked up by email)."""
        if not isinstance(contact, Contact):
            raise ValueError('Invalid contact')
        return self.api.select_recipient_data(self.id, contact.email)
#Column = collections.namedtuple('Column', ["title", "url", "dateadded", "format", "owner", "sizes", "votes"])
class MetaDataMixin(object):
    """Mixin for list-like resources that can fetch their meta data."""
    def get_meta_data(self):
        # Delegates to the api handle; presumably attached during
        # construction via to_python's ``api`` kwarg — verify.
        return self.api.get_list_meta_data(self)
class Column(object):
    """A single table column: normalised id, original name, type and key flag."""

    def __init__(self, column_name, column_type=None, default_value=None, **kwargs):
        # Map for remembering old and ugly column names
        self._mapping = {}
        # Clean up the name; the normalised form becomes the column's id.
        self.id = pep_up(column_name)
        self._mapping[self.id] = column_name
        self.name = column_name
        if column_type:
            self.type = int(column_type)
        else:
            self.type = None
        self.default = default_value
        self.is_key = kwargs.get('is_key', False)

    def __str__(self):
        return self.id

    def __repr__(self):
        return "<Column name={0} type={1}>".format(self.name, self.type)
class Database(List, MetaDataMixin):
    """A Silverpop database list; can mint schema-validated Contact DTOs."""
    def __repr__(self):
        return "<Database '{0}' '{1}'>".format(self.id, self.name)
    def create_contact(self):
        """Creates a new contact.
        Returns:
            Contact -- A new instance of a Contact.
            NOTE: The instance acts only as a DTO and won't be persisted
            until calling ``add_contact``.
        """
        # Lazily fetch meta data first; presumably this populates
        # self._table with the database schema — verify in the API client.
        if not hasattr(self, '_table'):
            self.get_meta_data()
        return Contact(from_table=self._table)
class Query(List, MetaDataMixin):
    """A Silverpop query list."""

    def __repr__(self):
        return "<Query '%s' '%s'>" % (self.id, self.name)
class TestList(List):
    """A Silverpop test list."""

    def __repr__(self):
        return "<TestList '%s' '%s'>" % (self.id, self.name)
class SeedList(List):
    """A Silverpop seed list."""

    def __repr__(self):
        return "<SeedList '%s' '%s'>" % (self.id, self.name)
class SuppressionList(List):
    """A Silverpop suppression list."""

    def __repr__(self):
        return "<SuppressionList '%s' '%s'>" % (self.id, self.name)
class RelationalTable(List, MetaDataMixin):
    """A Silverpop relational table; bulk operations are not implemented yet."""

    def __repr__(self):
        return "<RelationalTable '%s' '%s'>" % (self.id, self.name)

    def export(self):
        raise NotImplementedError()

    def purge(self):
        raise NotImplementedError()

    def delete(self):
        raise NotImplementedError()
class ContactList(List):
    """A Silverpop contact list."""

    def __repr__(self):
        return "<ContactList '%s' '%s'>" % (self.id, self.name)
# Maps the numeric TYPE field of a list element to the concrete List
# subclass instantiated by List.from_element. Defined after the classes so
# all names are resolvable.
LIST_TYPE_MAP = {
    LIST_TYPE_DATABASE: Database,
    LIST_TYPE_QUERY: Query,
    LIST_TYPE_TEST_LIST: TestList,
    LIST_TYPE_SEED_LIST: SeedList,
    LIST_TYPE_SUPPRESSION_LIST: SuppressionList,
    LIST_TYPE_RELATIONAL_TABLE: RelationalTable,
    LIST_TYPE_CONTACT_LIST: ContactList,
}
| {
"repo_name": "butfriendly/friendly-silverpop",
"path": "friendly/silverpop/engage/resources.py",
"copies": "1",
"size": "7753",
"license": "isc",
"hash": 7540538281430424000,
"line_mean": 26.590747331,
"line_max": 118,
"alpha_frac": 0.5871275635,
"autogenerated": false,
"ratio": 3.7599418040737147,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4847069367573715,
"avg_score": null,
"num_lines": null
} |
from friendsecure.node import Node
from mock import MagicMock
import json
def test_message_received():
    """A received 'message' payload should be echoed to the screen once."""
    class Screen(object):
        def __init__(self):
            self.addLine = MagicMock()

    class Peer(object):
        def __init__(self, host='127.0.0.1', port=8888):
            self.host = host
            self.port = port

    screen = Screen()
    protocol = MagicMock()
    protocol.transport = MagicMock()
    protocol.transport.getPeer = MagicMock(return_value=Peer())
    node = Node('public_key', 'private_key', screen)
    node._contacts[('127.0.0.1', 8888)] = protocol
    msg = {'type': 'message', 'message': 'hello'}
    node.message_received(msg, protocol)
    assert node._screen.addLine.call_count == 1
    assert node._screen.addLine.call_args[0][0] == '[THEM] hello\n'
def test_send_message():
    """send_message should forward the dict to the contact's protocol exactly once."""
    protocol = MagicMock()
    node = Node('public_key', 'private_key', MagicMock())
    node._contacts[('127.0.0.1', 8888)] = protocol
    msg = {'type': 'message', 'message': 'hello'}
    node.send_message('127.0.0.1', 8888, msg)
    assert protocol.sendMessage.call_count == 1
    assert protocol.sendMessage.call_args[0][0] == msg
| {
"repo_name": "hpk42/p4p",
"path": "tests/test_node.py",
"copies": "1",
"size": "1258",
"license": "mit",
"hash": 3165935239622146600,
"line_mean": 25.2083333333,
"line_max": 66,
"alpha_frac": 0.5953895072,
"autogenerated": false,
"ratio": 3.4751381215469612,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9570527628746961,
"avg_score": 0,
"num_lines": 48
} |
from friendsecure.protocol import MessageProtocol
from mock import MagicMock
import json
def test_stringReceived():
    """Incoming JSON strings are decoded and handed to the factory's node."""
    # Minimal attribute holder standing in for the protocol factory chain.
    class Holder(object):
        def __init__(self, name, val):
            setattr(self, name, val)

    received = MagicMock()
    proto = MessageProtocol()
    proto.factory = Holder('node', Holder('message_received', received))
    msg = {'type': 'message', 'message': 'hello'}
    proto.stringReceived(json.dumps(msg))
    assert received.call_count == 1
    assert received.call_args[0][0] == msg
def test_sendMessage():
    """sendMessage should JSON-encode the dict and pass it to sendString once."""
    proto = MessageProtocol()
    proto.sendString = MagicMock()
    payload = {'type': 'message', 'message': 'hello'}
    proto.sendMessage(payload)
    assert proto.sendString.call_count == 1
    assert proto.sendString.call_args[0][0] == json.dumps(payload)
def test_error():
    """error() should wrap the description in an 'error' message dict."""
    proto = MessageProtocol()
    proto.sendMessage = MagicMock()
    proto.error('BANG!')
    assert proto.sendMessage.call_count == 1
    assert proto.sendMessage.call_args[0][0] == {
        'type': 'error',
        'description': 'BANG!'
    }
| {
"repo_name": "hpk42/p4p",
"path": "tests/test_protocol.py",
"copies": "1",
"size": "1242",
"license": "mit",
"hash": 990257023828552000,
"line_mean": 25.4255319149,
"line_max": 69,
"alpha_frac": 0.6264090177,
"autogenerated": false,
"ratio": 3.685459940652819,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9806803102223133,
"avg_score": 0.0010131712259371832,
"num_lines": 47
} |
from frigg.helpers.common import is_retest_comment
from .base import Event
class GithubEvent(Event):
    """Parses GitHub webhook payloads into frigg's common event interface.

    All properties read straight from ``self.data`` (the decoded webhook
    payload) and return ``None``/'' when a field is absent for the event
    type at hand.
    """
    REPOSITORY_URL = 'git@github.com:{event.repository_owner}/{event.repository_name}.git'
    ALLOWED_EVENT_TYPES = ['ping', 'push', 'delete', 'pull_request', 'issue_comment']
    ALLOWED_PULL_REQUEST_ACTIONS = ['opened', 'synchronize']
    ALLOWED_COMMENT_EVENTS = ['issue_comment']
    @property
    def repository_owner(self):
        # Falls back from owner 'name' to owner 'login'; None when no
        # repository data is present.
        try:
            if 'name' in self.data['repository']['owner']:
                return self.data['repository']['owner']['name']
            return self.data['repository']['owner']['login']
        except KeyError:
            return None
    @property
    def repository_name(self):
        try:
            return self.data['repository']['name']
        except KeyError:
            return None
    @property
    def repository_private(self):
        # Payload flag; None when no repository data is present.
        try:
            return self.data['repository']['private']
        except KeyError:
            return None
    @property
    def branch(self):
        # Branch (or tag) name; None for event types not handled below.
        if self.event_type == 'push':
            if self.data['ref'].startswith('refs/tags/'):
                # Tag pushes: report the tag name instead of a branch.
                return self.data['ref'].replace('refs/tags/', '')
            # Drops the 11-character 'refs/heads/' prefix.
            return self.data['ref'][11:]
        elif self.event_type == 'pull_request':
            return self.data['pull_request']['head']['ref']
        elif self.event_type == 'delete':
            return self.data['ref']
    @property
    def hash(self):
        # Commit sha to build; None for other event types.
        if self.event_type == 'push':
            return self.data['after']
        elif self.event_type == 'pull_request':
            return self.data['pull_request']['head']['sha']
    @property
    def pull_request_id(self):
        # 0 means "not related to a pull request".
        if self.event_type == 'pull_request':
            return self.data['number']
        elif self.event_type == 'issue_comment':
            if 'pull_request' in self.data['issue']:
                # The PR number is the last path segment of the API url.
                return int(self.data['issue']['pull_request']['url'].split("/")[-1])
        return 0
    @property
    def commit(self):
        # Last commit of a push payload, falling back to 'head_commit'.
        # NOTE(review): raises KeyError when 'commits' is absent — only the
        # push branch of ``author``/``message`` uses this; confirm no other
        # callers hit it with non-push payloads.
        if len(self.data['commits']):
            return self.data['commits'][-1]
        elif 'head_commit' in self.data:
            return self.data['head_commit']
    @property
    def author(self):
        # Username of the pusher/PR author; '' when unavailable.
        if self.event_type == 'push':
            if self.commit and 'username' in self.commit['author']:
                return self.commit['author']['username']
        elif self.event_type == 'pull_request':
            return self.data['pull_request']['user']['login']
        return ''
    @property
    def message(self):
        # Commit message, or PR title+body; '' when unavailable.
        if self.event_type == 'push':
            if self.commit:
                return self.commit['message']
        elif self.event_type == 'pull_request':
            # Title and body may each be null in the payload.
            return '{}\n{}'.format(
                self.data['pull_request']['title'] or '',
                self.data['pull_request']['body'] or ''
            )
        return ''
    @property
    def is_unknown_event_type(self):
        # True when the payload should be ignored rather than built.
        if self.event_type == 'ping':
            return 'repository' not in self.data
        if self.event_type == 'push':
            # Branch deletions arrive as push events with 'deleted' set.
            return self.data['deleted']
        if self.event_type == 'pull_request':
            return self.data['action'] not in self.ALLOWED_PULL_REQUEST_ACTIONS
        if self.event_type == 'issue_comment':
            return not self.is_retest_comment_event
        else:
            # NOTE: this else pairs only with the issue_comment check above;
            # any other unmatched type falls through to the base class.
            return super().is_unknown_event_type
    @property
    def is_retest_comment_event(self):
        # A comment on a pull request whose body asks for a retest.
        if self.event_type in self.ALLOWED_COMMENT_EVENTS:
            return 'pull_request' in self.data['issue'] and \
                   is_retest_comment(self.data['comment']['body'])
        return False
| {
"repo_name": "frigg/frigg-hq",
"path": "frigg/webhooks/events/github.py",
"copies": "1",
"size": "3666",
"license": "mit",
"hash": -652788898005573400,
"line_mean": 32.9444444444,
"line_max": 90,
"alpha_frac": 0.5564648118,
"autogenerated": false,
"ratio": 4.105263157894737,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00031834836896396407,
"num_lines": 108
} |
from frmwk import flask_framework
# from frmwk import db, lm, oid, babel
from flask.ext.login import login_required, current_user
# from flask.ext.login import login_user, logout_user
from flask import render_template, jsonify, g
# from flask import flash, redirect, session, url_for, request
# from frmwk.forms.app_forms import LeaseForm
from frmwk.forms.fmLease import LeaseForm
'''
@flask_framework.route('/lease', methods = ['GET', 'POST'])
@login_required
def editLease():
form = LeaseForm(g.user.nickname)
if form.validate_on_submit():
g.user.nickname = form.nickname.data
g.user.about_me = form.about_me.data
orm_db.session.add(g.user)
orm_db.session.commit()
flash(gettext('Your changes have been saved.'))
return redirect(url_for('edit'))
elif request.method != "POST":
form.nickname.data = g.user.nickname
form.about_me.data = g.user.about_me
return render_template('lease.html',
form = form)
return render_template('lease.html')
'''
@flask_framework.route('/v1/appl')
@flask_framework.route('/v1/appl/<int:page>')
def api_appl(page = 1, internal = False):
    """JSON API for the appl view; returns the raw dict when called internally."""
    payload = {
        'name': 'Willy',
        'lstData': ['spam', 'eggs', 100, 1234, 9999],
    }
    if not internal:
        return jsonify(payload)
    return payload
@flask_framework.route('/appl')
@flask_framework.route('/appl/<int:page>')
@login_required
def appl(page = 1):
    """HTML view for appl; reuses the API handler internally for its payload."""
    return render_template(
        'appl.html',
        user = current_user,
        payload = api_appl(page, True))
| {
"repo_name": "martinhbramwell/evalOfFlask",
"path": "frmwk/view/overview.py",
"copies": "1",
"size": "1527",
"license": "bsd-3-clause",
"hash": -6465848023048864000,
"line_mean": 28.9411764706,
"line_max": 62,
"alpha_frac": 0.6751800917,
"autogenerated": false,
"ratio": 3.1226993865030677,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42978794782030677,
"avg_score": null,
"num_lines": null
} |
from frmwk import flask_framework, orm_db
from flask.ext.login import login_required, current_user
from flask.ext.babel import gettext
from flask import render_template, flash, request, redirect, url_for, g
from frmwk import administrator_permission
# from flask import Response
from frmwk.model.mdRole import Role
from frmwk.forms.fmRole import RoleForm
@flask_framework.route('/roles')
@flask_framework.route('/roles/<int:page>')
@login_required
def roles(page = 1):
print 'roles or roles with ' + str(page)
return renderThem({'pageNum': page})
@flask_framework.route('/newrole', methods = ['GET', 'POST'])
@login_required
def newrole():
    """Create a new Role; admin-only, otherwise redirect to the listing."""
    print 'newrole'
    if administrator_permission.can():
        role = Role()
        form = RoleForm(role)
        # Valid POST persists via saveIt; GET/invalid POST re-renders the form.
        if form.validate_on_submit():
            print 'saving * * * * '
            return saveIt(role, form)
        return renderIt({'key': 'new', 'form': form})
    else:
        flash(gettext('You are not authorised to create new roles. You can request permission below.'), 'error')
        return redirect(url_for('roles'))
@flask_framework.route('/role/<role_id>', methods = ['GET', 'POST'])
@login_required
def role(role_id = None):
    """Show/edit a single Role; admin-only, otherwise redirect to the profile.

    NOTE(review): an unknown role_id leaves ``role`` as None and the GET
    branch below would raise AttributeError — confirm intended handling.
    """
    print 'role/id with ' + str(role_id)
    if administrator_permission.can():
        role = Role.query.filter_by(id = role_id).first()
        form = RoleForm(role)
        # Valid POST persists via saveIt; plain GET pre-populates the form.
        if form.validate_on_submit():
            print "Saving {} with key {}.".format(form.name.data, form.role_id.data)
            return saveIt(role, form)
        elif request.method != "POST":
            form.name.data = role.name
            form.role_id.data = role.id
        return renderIt({'key': role_id, 'form': form})
    else:
        flash(gettext('You are not authorised to edit roles. You can request permission below.'), 'error')
        return redirect(url_for('edit', nickname = g.user.nickname))
def saveIt(role, form):
    """Copy the submitted form fields onto ``role``, persist, and return to the listing."""
    role.id = form.role_id.data
    role.name = form.name.data
    orm_db.session.add(role)
    orm_db.session.commit()
    flash(gettext('Your changes have been saved.'), 'success')
    return redirect(url_for('roles'))
def renderIt(pyld):
    """Render a single-record view; the surrounding listing is always page 1."""
    pyld.update(pageNum = 1)
    return renderThem(pyld)
def renderThem(pyld):
    """Render the role listing template with all Role records."""
    pyld['page'] = 'Role'
    # TODO: switch to .paginate(page, POSTS_PER_PAGE, False) for real paging.
    pyld['records'] = Role.query.all()
    # (Removed an unused local that merely aliased pyld['records'].)
    return render_template('role.html', payload = pyld)
| {
"repo_name": "martinhbramwell/evalOfFlask",
"path": "frmwk/view/vwRole.py",
"copies": "1",
"size": "2472",
"license": "bsd-3-clause",
"hash": -9184709729844391000,
"line_mean": 32.4054054054,
"line_max": 112,
"alpha_frac": 0.6371359223,
"autogenerated": false,
"ratio": 3.5517241379310347,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9560825007634216,
"avg_score": 0.025607010519363815,
"num_lines": 74
} |
from frmwk import flask_framework, orm_db
# from frmwk import db, lm, oid, babel
from flask.ext.login import login_required, current_user
# from flask.ext.login import login_user, logout_user
from flask.ext.babel import gettext
from flask import render_template, flash, request, redirect, url_for
# from flask import flash, session, g, jsonify
from frmwk import comptroller_permission
# from flask import Response
from frmwk.model.mdLease import Lease
from frmwk.forms.fmLease import LeaseForm
@flask_framework.route('/leases')
@flask_framework.route('/leases/<int:page>')
@login_required
def leases(page = 1):
print 'leases or leases with ' + str(page)
return renderThem({'pageNum': page})
@flask_framework.route('/newlease', methods = ['GET', 'POST'])
@login_required
def newlease():
    """Create a new Lease; comptroller-only, otherwise redirect to the listing."""
    print 'newlease'
    if comptroller_permission.can():
        lease = Lease()
        form = LeaseForm(lease)
        # Valid POST persists via saveIt; GET/invalid POST re-renders the form.
        if form.validate_on_submit():
            print 'saving * * * * '
            return saveIt(lease, form)
        return renderIt({'key': 'new', 'form': form})
    else:
        flash(gettext('You are not authorised to create new leases. You can request permission in "Your Profile"'), 'error')
        return redirect(url_for('leases'))
@flask_framework.route('/lease/<official_id>', methods = ['GET', 'POST'])
@login_required
def lease(official_id = None):
    """Show/edit a single Lease identified by its official id.

    NOTE(review): unlike newlease there is no permission check here, and an
    unknown id leaves ``lease`` as None (AttributeError on the GET branch)
    — confirm whether both are intended.
    """
    print 'lease/id with ' + str(official_id)
    lease = Lease.query.filter_by(official_id = official_id).first()
    form = LeaseForm(lease)
    # Valid POST persists via saveIt; plain GET pre-populates the form.
    if form.validate_on_submit():
        print 'saving * * * * '
        return saveIt(lease, form)
    elif request.method != "POST":
        form.nick_name.data = lease.nick_name
        form.official_id.data = lease.official_id
        form.official_name.data = lease.official_name
    return renderIt({'key': official_id, 'form': form})
def saveIt(lease, form):
    """Copy the submitted form fields onto ``lease``, persist, and show the record."""
    lease.official_name = form.official_name.data
    lease.official_id = form.official_id.data
    lease.nick_name = form.nick_name.data
    orm_db.session.add(lease)
    orm_db.session.commit()
    flash(gettext('Your changes have been saved.'), 'success')
    return redirect(url_for('lease', official_id = lease.official_id))
def renderIt(pyld):
    """Render a single-record view; the surrounding listing is always page 1."""
    pyld.update(pageNum = 1)
    return renderThem(pyld)
def renderThem(pyld):
    """Render the lease listing with all Lease records ordered by primary key."""
    # TODO: switch to .paginate(page, POSTS_PER_PAGE, False) for real paging.
    pyld['records'] = Lease.query.filter().order_by(Lease.id)
    pyld['page'] = 'Lease'
    return render_template('lease.html', payload = pyld)
| {
"repo_name": "martinhbramwell/evalOfFlask",
"path": "frmwk/view/vwLease.py",
"copies": "1",
"size": "2537",
"license": "bsd-3-clause",
"hash": -4576220857533282300,
"line_mean": 33.7534246575,
"line_max": 124,
"alpha_frac": 0.6673236106,
"autogenerated": false,
"ratio": 3.396251673360107,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9455893287970716,
"avg_score": 0.0215363991978783,
"num_lines": 73
} |
from froide.celery import app as celery_app
from .utils import (
run_guidance, run_guidance_on_queryset, GuidanceApplicator,
GuidanceResult, notify_users
)
@celery_app.task(name='froide.guide.tasks.run_guidance_task')
def run_guidance_task(message_id):
    """Celery task: run guidance rules for a single FoiMessage.

    Silently ignores ids whose message was deleted before the task ran.
    """
    from froide.foirequest.models import FoiMessage
    try:
        message = FoiMessage.objects.get(id=message_id)
    except FoiMessage.DoesNotExist:
        return
    run_guidance(message)
@celery_app.task(name='froide.guide.tasks.run_guidance_on_queryset_task')
def run_guidance_on_queryset_task(message_ids, notify=False):
    """Celery task: run guidance over several messages, optionally notifying users."""
    from froide.foirequest.models import FoiMessage
    messages = FoiMessage.objects.filter(id__in=message_ids)
    run_guidance_on_queryset(messages, notify=notify)
@celery_app.task(name='froide.guide.tasks.add_action_to_queryset_task')
def add_action_to_queryset_task(action_id, message_ids):
    """Celery task: apply one guidance Action to each message, notifying per message.

    Silently ignores unknown action ids.
    """
    from froide.foirequest.models import FoiMessage
    from .models import Action
    try:
        action = Action.objects.get(id=action_id)
    except Action.DoesNotExist:
        return
    for message in FoiMessage.objects.filter(id__in=message_ids):
        applicator = GuidanceApplicator(message)
        guidance = applicator.apply_action(action)
        result = GuidanceResult([guidance], applicator.created_count, 0)
        notify_users([(message, result)])
| {
"repo_name": "stefanw/froide",
"path": "froide/guide/tasks.py",
"copies": "1",
"size": "1390",
"license": "mit",
"hash": -6140929993998612000,
"line_mean": 30.5909090909,
"line_max": 73,
"alpha_frac": 0.7179856115,
"autogenerated": false,
"ratio": 3.3737864077669903,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45917720192669903,
"avg_score": null,
"num_lines": null
} |
from froide.publicbody.api_views import PublicBodySerializer
from rest_framework import serializers
from .models import InformationObject
class CampaignProviderItemSerializer(serializers.Serializer):
    """Serializes one campaign item as delivered by a campaign provider.

    Only ``ident``, ``title``, ``description``, ``foirequests`` and
    ``categories``/``featured`` defaults are mandatory; everything else is
    optional per item.
    """
    ident = serializers.CharField()
    title = serializers.CharField()
    subtitle = serializers.CharField(required=False)
    address = serializers.CharField(required=False)
    request_url = serializers.CharField(required=False)
    publicbody_name = serializers.CharField(required=False)
    description = serializers.CharField()
    foirequest = serializers.IntegerField(min_value=0, required=False)
    foirequests = serializers.ListField(
        child=serializers.DictField(required=False)
    )
    lat = serializers.FloatField(required=False)
    lng = serializers.FloatField(required=False)
    resolution = serializers.CharField(required=False)
    context = serializers.DictField(required=False)
    categories = serializers.ListField(required=False)
    featured = serializers.BooleanField(required=False)
class CampaignProviderRequestSerializer(serializers.Serializer):
    """Serializes the data needed to start a request for a provider item.

    ``publicbody`` (single) and ``publicbodies`` (many) are both optional;
    which one is populated presumably depends on the provider — verify.
    """
    ident = serializers.CharField()
    lat = serializers.FloatField(required=False)
    lng = serializers.FloatField(required=False)
    name = serializers.CharField(required=False)
    address = serializers.CharField(required=False)
    publicbody = PublicBodySerializer(required=False)
    publicbodies = PublicBodySerializer(many=True, required=False)
    makeRequestURL = serializers.CharField(required=False)
    userRequestCount = serializers.IntegerField(required=False)
class InformationObjectSerializer(serializers.ModelSerializer):
    """Serializes an InformationObject, localised via the serializer context.

    ``context['language']`` selects the translation for title/subtitle and
    categories; ``context['request']`` is used on create to pick up the
    submitted title.
    """
    lat = serializers.FloatField(source='get_latitude', required=False)
    lng = serializers.FloatField(source='get_longitude', required=False)
    ident = serializers.CharField(required=False)
    request_url = serializers.SerializerMethodField()
    resolution = serializers.SerializerMethodField()
    foirequest = serializers.SerializerMethodField()
    categories = serializers.SerializerMethodField()
    title = serializers.SerializerMethodField()
    subtitle = serializers.SerializerMethodField()
    class Meta:
        model = InformationObject
        fields = (
            'title', 'subtitle', 'address', 'campaign', 'lat', 'lng',
            'request_url', 'foirequests', 'ident', 'resolution',
            'id', 'foirequest', 'categories'
        )
    def get_title(self, obj):
        # Switch the parler translation before reading the field.
        obj.set_current_language(self.context.get('language'))
        return obj.title
    def get_subtitle(self, obj):
        obj.set_current_language(self.context.get('language'))
        return obj.subtitle
    def get_categories(self, obj):
        categories = obj.categories.language(
            self.context.get('language'))
        return [{'id': cat.id, 'title': cat.title}
                for cat in categories]
    def get_request_url(self, obj):
        # Redirect URL is owned by the campaign's provider.
        provider = obj.campaign.get_provider()
        return provider.get_request_url_redirect(obj.ident)
    def get_foirequest(self, obj):
        # Implicitly None when no request exists.
        foirequest = obj.get_best_foirequest()
        if foirequest:
            return foirequest.id
    def get_resolution(self, obj):
        return obj.get_resolution()
    def create(self, validated_data):
        # Title arrives outside validated_data (it is a SerializerMethodField
        # above, hence read-only); set it after the initial save.
        title = self.context.get('request').data.get('title')
        iobj = InformationObject.objects.create(**validated_data)
        iobj.title = title
        iobj.save()
        return iobj
| {
"repo_name": "okfde/froide-campaign",
"path": "froide_campaign/serializers.py",
"copies": "1",
"size": "3451",
"license": "mit",
"hash": 1626687117393133600,
"line_mean": 37.3444444444,
"line_max": 72,
"alpha_frac": 0.709649377,
"autogenerated": false,
"ratio": 4.324561403508772,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5534210780508771,
"avg_score": null,
"num_lines": null
} |
from fro._implementation.chompers import abstract
from fro._implementation.chompers.box import Box
class ChainChomper(abstract.AbstractChomper):
    """Chomper whose underlying parser is generated lazily.

    ``func`` receives a ChainChomper and returns the real chomper; deferring
    generation until the first chomp lets grammars refer to themselves.
    """
    def __init__(self, func, significant=True, name=None):
        abstract.AbstractChomper.__init__(self, significant=significant, name=name)
        self._generation_func = func
        self._chomper = None
    def _chomp(self, state, tracker):
        # Generate (and cache) the underlying chomper on first use. The
        # fresh "lazier" instance handed to the generation function defers
        # its own generation again, avoiding infinite regress on recursive
        # grammars.
        if self._chomper is None:
            lazier = ChainChomper(self._generation_func, significant=self._significant, name=self._name)
            self._chomper = self._generation_func(lazier)
        return self._chomper.chomp(state, tracker)
class OptionalChomper(abstract.AbstractChomper):
    """Chomper that yields a default value whenever its child fails."""

    def __init__(self, child, default=None, significant=True, name=None):
        abstract.AbstractChomper.__init__(self, significant, name)
        self._child = child
        self._default = default

    def _chomp(self, state, tracker):
        start_line = state.line()
        start_col = state.column()
        result = self._child.chomp(state, tracker)
        if result is not None:
            return result
        if state.line() != start_line:
            # The child consumed input past a line boundary before failing.
            self._failed_lookahead(state, tracker)
        state.reset_to(start_col)
        return Box(self._default)
class StubChomper(abstract.AbstractChomper):
    """Placeholder chomper whose behaviour is supplied later via a delegate."""

    def __init__(self, significant=True, name=None):
        abstract.AbstractChomper.__init__(self, significant, name)
        self._delegate = None

    def set_delegate(self, delegate):
        """Install the real chomper; may only be done once."""
        if self._delegate is not None:
            raise AssertionError("Cannot set a stub's delegate twice")
        self._delegate = delegate

    def _chomp(self, state, tracker):
        delegate = self._delegate
        if delegate is None:
            raise ValueError("Stub chomper has no delegate")
        return delegate.chomp(state, tracker)
class ThunkChomper(abstract.AbstractChomper):
    """Chomper that defers to whatever chomper ``thunk`` produces."""

    def __init__(self, thunk, significant=True, name=None):
        abstract.AbstractChomper.__init__(self, significant=significant, name=name)
        self._thunk = thunk

    def _chomp(self, state, tracker):
        # The thunk is re-evaluated on every chomp; its result is not cached.
        target = self._thunk()
        return target.chomp(state, tracker)
| {
"repo_name": "ethantkoenig/fro",
"path": "fro/_implementation/chompers/util.py",
"copies": "1",
"size": "2115",
"license": "mit",
"hash": -3973277758464495000,
"line_mean": 35.4655172414,
"line_max": 104,
"alpha_frac": 0.653427896,
"autogenerated": false,
"ratio": 3.8524590163934427,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5005886912393442,
"avg_score": null,
"num_lines": null
} |
from fro._implementation import iters
from fro._implementation.chompers import abstract
from fro._implementation.chompers.box import Box
class SequenceChomper(abstract.AbstractChomper):
    """Chomper that repeatedly applies an element chomper and reduces the results."""

    def __init__(self, element, reducer, separator=None,
                 significant=True, name=None):
        abstract.AbstractChomper.__init__(self, significant, name)
        self._element = element
        self._reducer = reducer
        # May be None, meaning the elements are not separated.
        self._separator = separator

    def _chomp(self, state, tracker):
        values = iter(SequenceIterable(self, state, tracker))
        result = self._reducer(values)
        # Close the generator so its cleanup runs even if the reducer did
        # not exhaust it.
        iters.close(values)
        return Box(result)
class SequenceIterable(object):
    """Lazily yields the values parsed by a SequenceChomper's element chomper.

    Iteration stops (without error) at the first element/separator failure
    on the same line, rolling the state back to the last good column.
    """
    def __init__(self, chomper, state, tracker):
        self._state = state
        self._element = chomper._element
        self._sep = chomper._separator  # may be None
        self._tracker = tracker
        self._failed_lookahead = chomper._failed_lookahead
    def __iter__(self):
        # This method is a common hotspot, so it is written for
        # efficiency: everything is bound to locals once, and the private
        # _line/_column attributes are read directly instead of through the
        # accessor methods.
        state = self._state
        element = self._element
        tracker = self._tracker
        sep = self._sep
        rollback_line = state._line  # state.line()
        rollback_col = state._column  # state.column()
        while True:
            box = element.chomp(state, tracker)
            if box is None:
                # Element failed. Failing after crossing a line boundary is
                # an illegal lookahead; otherwise roll back and finish.
                if state.line() != rollback_line:
                    self._failed_lookahead(state, tracker)
                state.reset_to(rollback_col)
                return
            yield box.value
            # Remember the position after a successful element so a failed
            # separator can roll back to it.
            rollback_line = state._line  # state.line()
            rollback_col = state._column  # state.column()
            if sep is not None:
                box_ = sep.chomp(state, tracker)
                if box_ is None:
                    if state.line() != rollback_line:
                        self._failed_lookahead(state, tracker)
                    state.reset_to(rollback_col)
                    return
| {
"repo_name": "ethantkoenig/fro",
"path": "fro/_implementation/chompers/sequence.py",
"copies": "1",
"size": "2072",
"license": "mit",
"hash": 4644263364839866000,
"line_mean": 35.350877193,
"line_max": 66,
"alpha_frac": 0.5772200772,
"autogenerated": false,
"ratio": 4.352941176470588,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5430161253670588,
"avg_score": null,
"num_lines": null
} |
from fro._implementation import pretty_printing
class FroParseError(Exception):
    """
    An exception for parsing failures
    """
    def __init__(self, chomp_errors, cause=None):
        # chomp_errors must be non-empty; the reported location is taken
        # from the first error in the list.
        self._messages = [_message_of_chomp_error(ce) for ce in chomp_errors]
        self._location = chomp_errors[0].location()
        self._cause = cause

    def __str__(self):
        """
        A human readable description of the error. Include both the error messages, and extra information describing the
        location of the error. Equivalent to ``to_str()``.
        :return: a human readable description
        :rtype: str
        """
        return self.to_str(index_from=1)

    def cause(self):
        """
        Returns the ``Exception`` that triggered this error, or ``None`` is this error was not triggered by another
        exception
        :return: the exception that triggered this error
        :rtype: Exception
        """
        return self._cause

    def context(self):
        # Pretty-printed excerpt of the offending line with the zero-indexed
        # error column marked.
        return pretty_printing.printable_string_index_with_context(
            self._location.text(),
            self.column(0))

    def column(self, index_from=1):
        """
        Returns the column number where the error occurred, or more generally the
        index inside the chunk where the error occurred. Indices are indexed from
        ``index_from``.
        :param int index_from: number to index column numbers by
        :return: column number of error
        :rtype: int
        """
        return self._location.column() + index_from

    def line(self, index_from=1):
        """
        Returns the line number where the error occurred, or more generally
        the index of the chunk where the error occurred. Indices are indexed from
        ``index_from``.
        :param int index_from: number to index line numbers by
        :return: row number of error
        :rtype: int
        """
        return self._location.line() + index_from

    def messages(self):
        """
        A non-empty list of ``Message`` objects which describe the reasons for failure.
        :return: a non-empty list of ``Message`` objects which describe the reasons for failure.
        :rtype: List[FroParseError.Message]
        """
        # Return a copy so callers cannot mutate our internal list.
        return list(self._messages)

    def to_str(self, index_from=1, filename=None):
        """
        Returns a readable description of the error, with indices starting at ``index_from``, and a
        filename of ``filename`` include if a filename is provided. Include both the error messages,
        and extra information describing the location of the error. This method is essentially a
        configurable version of ``__str__()``.
        :param int index_from: number to index column/line numbers by
        :param str filename: name of file whose parse trigger the exception
        :return: a readable description of the error
        :rtype: str
        """
        first_line = "At line {l}, column {c}".format(
            l=self.line(index_from),
            c=self.column(index_from))
        if filename is not None:
            first_line += " of " + filename
        result = "\n".join([
            first_line,
            "\n".join(str(x) for x in self._messages),
            self.context()])
        if self._cause is not None:
            result += "\n\nCaused by: " + str(self._cause)
        return result

    class Message(object):
        """
        Represents an error message describing a reason for failure
        """
        def __init__(self, content, name=None):
            self._content = content
            self._name = name  # name of the parser, or None if unnamed

        def __str__(self):
            """
            A string representation of the message that includes both the content and parser name.
            :return:
            """
            if self._name is None:
                return self._content
            return "{0} when parsing {1}".format(self._content, self._name)

        def content(self):
            """
            The content of the error message
            :return: the content of the error message
            :rtype: str
            """
            return self._content

        def name(self):
            """
            The name of the parser at which the message was generated, or ``None`` if all relevant parsers are unnamed.
            :return: name of parser where error occurred
            :rtype: str
            """
            return self._name
# ----------------------------- internals
def _message_of_chomp_error(chomp_error):
    # Convert a low-level chomp error into a user-facing Message.
    return FroParseError.Message(chomp_error.message(), chomp_error.name())
| {
"repo_name": "ethantkoenig/fro",
"path": "fro/_implementation/parse_error.py",
"copies": "1",
"size": "4624",
"license": "mit",
"hash": 4962636492798149000,
"line_mean": 32.7518248175,
"line_max": 120,
"alpha_frac": 0.5815311419,
"autogenerated": false,
"ratio": 4.624,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.57055311419,
"avg_score": null,
"num_lines": null
} |
from fro._implementation.iters import CheckableIterator
from fro._implementation.location import Location
class ChompState(object):
    """
    Represents a position during parsing/chomping.

    Lines are pulled lazily from an iterator; the state tracks the current
    line's text, its length, a zero-based line index, and a column index
    into the current line.
    """
    def __init__(self, lines, column=0):
        """
        :param lines: iterable<str>
        :param column: index at which to start
        """
        self._lines = CheckableIterator(lines)
        self._column = column
        # Defaults cover an empty ``lines`` iterable; previously _curr and
        # _len_curr were left unset in that case, so at_end()/current()
        # raised AttributeError instead of behaving sensibly.
        self._curr = ""
        self._len_curr = 0
        self._line = -1
        if self._lines.has_next():
            self._curr = next(self._lines)
            self._len_curr = len(self._curr)
            self._line += 1

    def advance_to(self, column):
        """Move forward to ``column``, rolling over onto following lines while
        the column sits exactly at the end of the current line.

        Precondition (unchecked, this is a hot path): column is
        non-negative, not past the current line's length, and not behind
        the current position.
        """
        while column == self._len_curr and self._lines.has_next():
            self._curr = next(self._lines)
            self._len_curr = len(self._curr)
            self._line += 1
            column = 0  # "recurse" onto start of next line
        self._column = column

    def at_end(self):
        # True once the column has consumed the final line.
        return self._column == self._len_curr and not self._lines.has_next()

    def column(self):
        return self._column

    def current(self):
        # Text of the line currently being chomped.
        return self._curr

    def line(self):
        return self._line

    def location(self):
        return Location(self._line, self._column, self._curr)

    def reset_to(self, column):
        """Roll back to an earlier column on the current line.

        Precondition (unchecked, hot path): 0 <= column <= current column.
        """
        self._column = column
| {
"repo_name": "ethantkoenig/fro",
"path": "fro/_implementation/chompers/state.py",
"copies": "1",
"size": "2072",
"license": "mit",
"hash": -8617001842106473000,
"line_mean": 31.8888888889,
"line_max": 87,
"alpha_frac": 0.5656370656,
"autogenerated": false,
"ratio": 3.9391634980988592,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5004800563698859,
"avg_score": null,
"num_lines": null
} |
"""License:
# Licensing terms
Traitlets is adapted from enthought.traits, Copyright (c) Enthought, Inc.,
under the terms of the Modified BSD License.
This project is licensed under the terms of the Modified BSD License
(also known as New or Revised or 3-Clause BSD), as follows:
- Copyright (c) 2001-, IPython Development Team
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the IPython Development Team nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## About the IPython Development Team
The IPython Development Team is the set of all contributors to the IPython project.
This includes all of the IPython subprojects.
The core team that coordinates development on GitHub can be found here:
https://github.com/jupyter/.
## Our Copyright Policy
IPython uses a shared copyright model. Each contributor maintains copyright
over their contributions to IPython. But, it is important to note that these
contributions are typically only changes to the repositories. Thus, the IPython
source code, in its entirety is not the copyright of any single person or
institution. Instead, it is the collective copyright of the entire IPython
Development Team. If individual contributors want to maintain a record of what
changes/contributions they have specific copyright on, they should indicate
their copyright in the commit message of the change, when they commit the
change to one of the IPython repositories.
With this in mind, the following banner should be used in any source code file
to indicate the copyright and license terms:
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
"""
import copy
try:
from inspect import Signature, Parameter, signature
except ImportError:
from funcsigs import Signature, Parameter, signature
from traitlets import Undefined
def _get_default(value):
    """Get default argument value, given the trait default value.

    Uses an identity check rather than ``==``: ``Undefined`` is a unique
    sentinel, and equality comparison can misbehave (or raise) for values
    with overloaded ``__eq__`` such as numpy arrays.
    """
    return Parameter.empty if value is Undefined else value
def signature_has_traits(cls):
    """Return a decorated class with a constructor signature that contain Trait names as kwargs.

    The new signature keeps the original positional parameters, ``*args``
    and keyword-only parameters, inserts one keyword-only parameter per
    public trait (with its default), and keeps ``**kwargs`` last.
    Raises RuntimeError if the constructor has no ``**kwargs`` to absorb
    the trait keywords.
    """
    # Public traits only; leading-underscore names are considered private.
    traits = [
        (name, _get_default(value.default_value))
        for name, value in cls.class_traits().items()
        if not name.startswith('_')
    ]
    # Taking the __init__ signature, as the cls signature is not initialized yet
    old_signature = signature(cls.__init__)
    old_parameter_names = list(old_signature.parameters)
    old_positional_parameters = []
    old_var_positional_parameter = None  # This won't be None if the old signature contains *args
    old_keyword_only_parameters = []
    old_var_keyword_parameter = None  # This won't be None if the old signature contains **kwargs
    # Bucket each existing parameter by kind so they can be reassembled in
    # the order Signature requires.
    for parameter_name in old_signature.parameters:
        # Copy the parameter
        parameter = copy.copy(old_signature.parameters[parameter_name])
        if parameter.kind is Parameter.POSITIONAL_ONLY or parameter.kind is Parameter.POSITIONAL_OR_KEYWORD:
            old_positional_parameters.append(parameter)
        elif parameter.kind is Parameter.VAR_POSITIONAL:
            old_var_positional_parameter = parameter
        elif parameter.kind is Parameter.KEYWORD_ONLY:
            old_keyword_only_parameters.append(parameter)
        elif parameter.kind is Parameter.VAR_KEYWORD:
            old_var_keyword_parameter = parameter
    # Unfortunately, if the old signature does not contain **kwargs, we can't do anything,
    # because it can't accept traits as keyword arguments
    if old_var_keyword_parameter is None:
        raise RuntimeError(
            'The {} constructor does not take **kwargs, which means that the signature can not be expanded with trait names'
            .format(cls)
        )
    new_parameters = []
    # Append the old positional parameters (except `self` which is the first parameter)
    new_parameters += old_positional_parameters[1:]
    # Append *args if the old signature had it
    if old_var_positional_parameter is not None:
        new_parameters.append(old_var_positional_parameter)
    # Append the old keyword only parameters
    new_parameters += old_keyword_only_parameters
    # Append trait names as keyword only parameters in the signature
    # (skipping names the constructor already declares explicitly).
    new_parameters += [
        Parameter(name, kind=Parameter.KEYWORD_ONLY, default=default)
        for name, default in traits
        if name not in old_parameter_names
    ]
    # Append **kwargs
    new_parameters.append(old_var_keyword_parameter)
    cls.__signature__ = Signature(new_parameters)
    return cls
| {
"repo_name": "maartenbreddels/vaex",
"path": "packages/vaex-jupyter/vaex/jupyter/decorators.py",
"copies": "1",
"size": "5934",
"license": "mit",
"hash": -9085873587535400000,
"line_mean": 39.3673469388,
"line_max": 124,
"alpha_frac": 0.7509268622,
"autogenerated": false,
"ratio": 4.561106840891622,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5812033703091621,
"avg_score": null,
"num_lines": null
} |
from frontend import app
from flask import render_template
from flask import send_from_directory
from flask import request
from flask import redirect
from flask import url_for
from flask import flash
from flask import abort
import os
import models
import forms
from wtfpeewee.orm import model_form
@app.route('/register/', methods=['GET', 'POST'])
def register():
    """Front page: render the manual registration form; persist it on submit."""
    reg_form = forms.ManualRegisterForm(request.values)
    if request.method == 'POST' and reg_form.submit.data:
        saveFormsToModels(reg_form)
        return redirect(url_for('register'))
    return render_template('frontpage.html', form=reg_form)
@app.route('/add/<modelname>/', methods=['GET', 'POST'])
def add(modelname):
    """Generic add/list page for the given model."""
    return render_template('editpage.html', **listAndEdit(modelname))
@app.route('/add/<modelname>/to/<foreign_table>/<foreign_key>', methods=['GET', 'POST'])
def addto(modelname, foreign_table, foreign_key):
    """Add an entry of one model and link it to an existing foreign row."""
    context = listAndEdit(modelname,
                          action='AddTo',
                          foreign_table=foreign_table,
                          foreign_key=foreign_key)
    return render_template('editpage.html', **context)
@app.route('/edit/<modelname>/<entryid>', methods=['GET', 'POST'])
def edit(modelname, entryid):
    """Edit page for a single existing entry."""
    context = listAndEdit(modelname, entryid)
    return render_template('editpage.html', **context)
def saveFormsToModels(form):
    """Persist a compound form whose fields are named ``<modelname>_<fieldname>``.

    Values of the shape ``ForeignKey_<modelname>`` are collected and resolved
    after the first save pass, so references point at saved rows; a second
    save pass then persists the foreign keys.
    """
    editedModels = {}
    foreignKeys = []
    for formfield in form.data:
        if formfield in ['csrf_token']:
            continue
        try:
            modelname, field = formfield.split('_')
        except ValueError:
            # Field name does not follow the <model>_<field> convention.
            continue
        value = form[formfield].data
        try:
            functionName, foreignKeyName = value.split('_')
        except (AttributeError, ValueError):
            # Not a string, or not exactly two '_'-separated parts:
            # treat it as a plain value.
            pass
        else:
            if functionName == 'ForeignKey':
                # Defer foreign-key wiring until every model is saved.
                foreignKeys.append(
                    dict(
                        modelname=modelname,
                        field=field,
                        foreignKeyName=foreignKeyName,
                    )
                )
                continue
        if modelname not in editedModels:
            editedModels[modelname] = models.ALL_MODELS_DICT[modelname]()
        setattr(editedModels[modelname], field, value)
    # First pass: save so every referenced row has an id.
    for model in editedModels:
        editedModels[model].save()
    for key in foreignKeys:
        setattr(
            editedModels[key['modelname']],
            key['field'],
            editedModels[key['foreignKeyName']])
        print('start')
        print('Set attr: {}, {}, {}'.format(
            editedModels[key['modelname']],
            key['field'],
            editedModels[key['foreignKeyName']]))
    # Second pass: persist the newly set foreign keys.
    for model in editedModels:
        editedModels[model].save()
def getFields(model, exclude=('id',)):
    """Return (name, type-name, foreign-model-or-False) for each model field.

    ``foreign-model-or-False`` is the title-cased destination table name for
    columns that are foreign keys, False otherwise.  ``exclude`` lists field
    names to omit (default: the primary key).  The default is now a tuple
    rather than a mutable list.
    """
    foreignKeys = {x.column : x.dest_table for x in models.db.get_foreign_keys(model.__name__)}
    fields = []
    for field in model._meta.sorted_field_names:
        if field in exclude:
            continue
        fieldtype = type(model._meta.fields[field]).__name__
        # peewee exposes FK columns as '<field>_id' in the schema.
        foreignFieldName = '{}_id'.format(field)
        if foreignFieldName in foreignKeys:
            foreignKeyModelName = foreignKeys[foreignFieldName].title()
        else:
            foreignKeyModelName = False
        fields.append(
            (field, fieldtype, foreignKeyModelName))
    return fields
def getRelatedModels(entry):
    """Collect rows in other tables that reference *entry*.

    Returns a tuple ``(relatedModelNames, entriesByModel)`` where the list
    preserves dependency order and the dict maps model name -> rows.
    Best-effort: failures (e.g. *entry* not yet saved) yield empty results.
    """
    entries = {}
    # Renamed from `models`: the old local shadowed the imported module.
    relatedModels = []
    try:
        for query, fk in reversed(list(entry.dependencies())):
            for row in fk.model_class.select().where(query):
                modelname = fk.model_class.__name__
                if modelname not in entries:
                    relatedModels.append(modelname)
                    entries[modelname] = []
                entries[modelname].append(row)
    except Exception:
        # Deliberate best-effort (was a bare `except:`): unsaved entries or
        # broken FKs simply produce no related rows.
        pass
    return (relatedModels, entries)
def listAndEdit(modelname, entryid = 0, entries = False, action = False, **kwargs):
    """Shared worker for the add/addto/edit views.

    Builds the template context for editpage.html: the model's generated
    form, the current entry (loaded if ``entryid`` matches, otherwise a
    blank one), its fields, and dependent rows.  On POST it saves or
    deletes the entry first; ``action == 'AddTo'`` additionally wires the
    saved entry into the foreign row given via ``kwargs``.
    """
    try:
        model = models.ALL_MODELS_DICT[modelname]
    except KeyError:
        abort(404)
    if not entries:
        entries = model.select()
    modelForm = model_form(model)
    fields = getFields(model)
    try:
        # Existing entry -> edit mode; any failure falls back to a blank one.
        entry = model.get(id=int(entryid))
        dependencies = getRelatedModels(entry)
    except:
        entry = model()
        dependencies = False
    form = modelForm(obj = entry)
    if request.method == 'POST':
        if request.form['submit'] == 'Save':
            form = modelForm(request.values, obj = entry)
            if form.validate():
                form.populate_obj(entry)
                entry.save()
                if action == 'AddTo':
                    addForeignKey(model, entry, kwargs['foreign_table'], kwargs['foreign_key'])
                    # NOTE(review): this redirect's return value is discarded,
                    # so no redirect actually happens -- confirm intent.
                    redirect(url_for('edit', modelname = model, entryid = kwargs['foreign_key']))
                flash('Your entry has been saved')
                print 'saved'
        elif request.form['submit'] == 'Delete':
            try:
                model.get(model.id == int(entryid)).delete_instance(recursive = True)
                #redirect(url_for('add', modelname = modelname))
            except:
                pass
            finally:
                # After deletion, present a fresh blank entry.
                entry = model()
                form = modelForm(obj = entry)
    # NOTE(review): rebinding `kwargs` here shadows the **kwargs parameter;
    # the incoming keyword arguments are unreachable past this point.
    kwargs = dict(
        links = [x.__name__ for x in models.ALL_MODELS],
        header = model.__name__,
        form=form,
        entry=entry,
        entries=entries,
        fields = fields,
        dependencies = dependencies,
        )
    return kwargs
def addForeignKey(model, entry, foreign_table, foreign_key):
    """Point an existing row of *foreign_table* at the freshly saved *entry*.

    Sets the foreign row's ``<modelname>`` attribute to ``entry.id`` and
    saves it.  Prints are debug output; converted to single-argument
    parenthesized form, which behaves identically under Python 2 and is
    valid Python 3.
    """
    foreignModel = models.ALL_MODELS_DICT[foreign_table]
    foreignItem = foreignModel.get(foreignModel.id == int(foreign_key))
    foreignFieldName = model.__name__.lower()
    print("entry = {}".format(foreignModel))
    print("item = {}".format(foreignItem))
    print("fieldName = {}".format(foreignFieldName))
    print("id = {}".format(entry.id))
    setattr(foreignItem, foreignFieldName, entry.id)
    foreignItem.save()
@app.route('/favicon.ico')
def favicon():
    """Serve the site favicon from the static folder."""
    static_dir = os.path.join(app.root_path, 'static')
    return send_from_directory(static_dir, 'favicon.png',
                               mimetype='image/vnd.microsoft.icon')
| {
"repo_name": "maltonx/workforce",
"path": "views.py",
"copies": "1",
"size": "7037",
"license": "mit",
"hash": 3968290124876270600,
"line_mean": 30.7302325581,
"line_max": 130,
"alpha_frac": 0.5566292454,
"autogenerated": false,
"ratio": 4.27002427184466,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.016526346256443537,
"num_lines": 215
} |
from ..frontend import jit
import numpy as np
def CND(x):
    """
    Simpler approximation of the cumulative normal density.

    Degree-5 polynomial approximation (Abramowitz/Stegun style -- confirm
    the exact source); symmetric about 0 via CND(-x) = 1 - CND(x).
    """
    c1, c2, c3, c4, c5 = (0.31938153, -0.356563782, 1.781477937,
                          -1.821255978, 1.330274429)
    ax = abs(x)
    k = 1.0 / (1.0 + 0.2316419 * ax)
    poly = (c1*k + c2*k*k + c3*k*k*k + c4*k*k*k*k + c5*k*k*k*k*k)
    w = 1.0 - 1.0/np.sqrt(2*3.141592653589793) * np.exp(-1*ax*ax/2.) * poly
    if x < 0:
        return 1.0 - w
    return w
def erf(x):
    """Gauss error function via the normal-CDF approximation CND."""
    # erf(x) = 2 * Phi(x * sqrt(2)) - 1
    sqrt2 = 1.4142135623730951
    return 2 * CND(x * sqrt2) - 1
def erfc(x):
    """Complementary error function, 1 - erf(x), via CND."""
    sqrt2 = 1.4142135623730951
    return 2 * (1 - CND(x * sqrt2))
"""
P = np.asarray([
2.46196981473530512524E-10,
5.64189564831068821977E-1,
7.46321056442269912687E0,
4.86371970985681366614E1,
1.96520832956077098242E2,
5.26445194995477358631E2,
9.34528527171957607540E2,
1.02755188689515710272E3,
5.57535335369399327526E2
])
Q = np.asarray([
1.32281951154744992508E1,
8.67072140885989742329E1,
3.54937778887819891062E2,
9.75708501743205489753E2,
1.82390916687909736289E3,
2.24633760818710981792E3,
1.65666309194161350182E3,
5.57535340817727675546E2
])
R = np.asarray([
5.64189583547755073984E-1,
1.27536670759978104416E0,
5.01905042251180477414E0,
6.16021097993053585195E0,
7.40974269950448939160E0,
2.97886665372100240670E0
])
S = np.asarray([
2.26052863220117276590E0,
9.39603524938001434673E0,
1.20489539808096656605E1,
1.70814450747565897222E1,
9.60896809063285878198E0,
3.36907645100081516050E0
])
T = np.asarray([
9.60497373987051638749E0,
9.00260197203842689217E1,
2.23200534594684319226E3,
7.00332514112805075473E3,
5.55923013010394962768E4
])
U = np.asarray([
3.35617141647503099647E1,
5.21357949780152679795E2,
4.59432382970980127987E3,
2.26290000613890934246E4,
4.92673942608635921086E4
])
MAXLOG = 7.09782712893383996732E2
@jit
def polevl(x, coef):
ans = coef[0]
for i in range(len(coef) - 1, 1, -1):
ans = ans * x + coef[i]
return ans
@jit
def p1evl(x, coef):
ans = x + coef[0]
for i in range(len(coef) - 1, 1, -1):
ans = ans * x + coef[i]
return ans
@jit
def ndtr(a):
x = a * np.sqrt(2)
z = np.abs(x)
if z < np.sqrt(2):
y = 0.5 + 0.5 * erf(x)
else:
y = 0.5 * erfc(z)
if x > 0:
y = 1.0 - y
return y
def erfc(a):
if a < 0.0:
x = -a
else:
x = a
if x < 1.0:
return 1.0 - erf(a)
z = -a * a
if z < -MAXLOG:
if a < 0:
return 2.0
else:
return 0.0
z = np.exp(z)
if x < 8.0:
p = polevl(x, P)
q = p1evl(x, Q)
else:
p = polevl(x, R)
q = p1evl(x, S)
y = (z * p) / q
if a < 0:
y = 2.0 - y
return y
def erf(x):
if np.abs(x) > 1.0:
return 1.0 - erfc(x)
z = x * x
y = x * polevl(z, T, 4) / p1evl(z, U, 5)
return (y)
""" | {
"repo_name": "pombredanne/parakeet",
"path": "parakeet/lib/prob.py",
"copies": "2",
"size": "2741",
"license": "bsd-3-clause",
"hash": 182760853572861470,
"line_mean": 16.2452830189,
"line_max": 73,
"alpha_frac": 0.6256840569,
"autogenerated": false,
"ratio": 2.025868440502587,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3651552497402587,
"avg_score": null,
"num_lines": null
} |
from ..frontend import jit
import numpy as np
@jit
def conjugate(x):
    """
    For now we don't have complex numbers so this is just the identity function
    """
    # Placeholder until parakeet grows a complex dtype.
    return x
@jit
def real(x):
    """
    For now we don't have complex types, so real is just the identity function
    """
    # Placeholder until parakeet grows a complex dtype.
    return x
def _scalar_sign(x):
if x > 0:
return 1
elif x < 0:
return -1
else:
return 0
@jit
def sign(x):
    # Elementwise sign via parakeet's data-parallel map.
    return map(_scalar_sign, x)
@jit
def reciprocal(x):
    # Elementwise 1/x (numpy.reciprocal equivalent).
    return 1 / x
@jit
def rad2deg(rad):
    # Radians -> degrees; pi inlined as a literal for the jit tracer.
    return rad * 180 / 3.141592653589793
@jit
def deg2rad(deg):
    # Degrees -> radians; pi inlined as a literal for the jit tracer.
    return deg * 3.141592653589793 / 180
@jit
def hypot(x,y):
    # Naive sqrt(x^2 + y^2); unlike numpy.hypot this can overflow for
    # very large inputs.
    return np.sqrt(x**2 + y**2)
@jit
def square(x):
    # Elementwise x*x.
    return x * x
def _logaddexp_scalar(x, y):
"""
Copied from BioPython (http://biopython.org/)
"""
if x < y:
bigger = x
smaller = y
else:
bigger = x
smaller = y
diff = smaller - bigger
if diff < -100:
return bigger
return bigger + np.log1p(np.exp(diff))
@jit
def logaddexp(x, y):
    # Elementwise stable log(exp(x) + exp(y)) via parakeet's map.
    return map(_logaddexp_scalar, x, y)
@jit
def log2_1p(x):
    # log2(1 + x), computed via natural log1p and the change-of-base factor.
    return (1.0 / np.log(2)) * np.log1p(x)
@jit
def logaddexp2(x, y):
    # Base-2 analogue of logaddexp: log2(2**x + 2**y), anchored at the
    # larger argument for stability.
    diff = x - y
    return np.where(diff > 0, x + log2_1p(2 ** -diff) , y + log2_1p(2 ** diff))
@jit
def true_divide(x, y):
    """
    Not exactly true divide, since I guess it's sometimes supposed to stay an int
    """
    # Adding 0.0 coerces both operands to float before dividing.
    return (x + 0.0) / (y + 0.0)
@jit
def floor_divide(x, y):
    # Divide then floor (numpy.floor_divide equivalent).
    return np.floor(x / y)
| {
"repo_name": "pombredanne/parakeet",
"path": "parakeet/lib/math.py",
"copies": "2",
"size": "1470",
"license": "bsd-3-clause",
"hash": -3449270461884445000,
"line_mean": 14.3125,
"line_max": 79,
"alpha_frac": 0.5911564626,
"autogenerated": false,
"ratio": 2.6775956284153004,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8893087459018278,
"avg_score": 0.07513292639940417,
"num_lines": 96
} |
from FrontEnd.models import *
from django.contrib.auth.models import User
from rest_framework import serializers
from django.db.models import Sum
from datetime import datetime, date, timedelta
class InterstitialSerializer(serializers.ModelSerializer):
    """Serialize an Intersticial (interstitial ad page) with its company id."""
    Company = serializers.PrimaryKeyRelatedField()

    class Meta:
        model = Intersticial
        fields = ('id', 'Name', 'DisplayChance', 'Company', 'Url', 'AdClickUrl', 'Active')
class DomainSerializer(serializers.ModelSerializer):
    """Serialize a SupportedDomain with its nested interstitial, if any."""
    Company = serializers.PrimaryKeyRelatedField()
    Intersticial = InterstitialSerializer()

    class Meta:
        model = SupportedDomain
        fields = ('id','Domain', 'Company', 'Intersticial')
class CompanySerializer(serializers.ModelSerializer):
    """Serialize a Company together with all its domains."""
    Domains = DomainSerializer(many=True)

    class Meta:
        model = Company
        fields = ('id', 'Name', 'PackageLevel', 'Domains')
class UserSerializer(serializers.ModelSerializer):
    """Serialize an ExtendedUser, pulling username/email off the auth User."""
    Username = serializers.Field('User.username')
    Email = serializers.Field('User.email')

    class Meta:
        model = ExtendedUser
        fields = ('id', 'Username', 'Email', 'IsAdmin', 'AccountActivated')
class AgentTypeSerializer(serializers.ModelSerializer):
    """Expose only the agent-type string of a LinkAgentType row."""
    class Meta:
        model = LinkAgentType
        fields = ('AgentType',)
class LanguageSerializer(serializers.ModelSerializer):
    """Expose only the language string of a LinkLanguage row."""
    class Meta:
        model = LinkLanguage
        fields = ('Language',)
class LinkTagSerializer(serializers.ModelSerializer):
    """Expose only the tag string of a LinkTag row."""
    class Meta:
        model = LinkTag
        fields = ('Tag',)
class LinkSerializer(serializers.ModelSerializer):
    """Serialize a RedirectLink, flattening its tags into a list of strings."""
    Domain = serializers.PrimaryKeyRelatedField()
    User = serializers.PrimaryKeyRelatedField()
    Tags = serializers.SerializerMethodField('getTagArray')
    LinkTitle = serializers.CharField(max_length=255, required=False)

    def getTagArray(self, obj):
        # Flatten the related LinkTag rows to their bare tag strings
        # (was a manual append loop).
        return [tag.Tag for tag in obj.Tags.all()]

    class Meta:
        model = RedirectLink
        fields = ('id', 'LinkTitle', 'UrlKey', 'RedirectUrl', 'Domain', 'User', 'Tags',)
class StatsSerializer(serializers.ModelSerializer):
    """One click event with its nested agent types and languages."""
    AgentTypes = AgentTypeSerializer(many=True)
    Languages = LanguageSerializer(many=True)

    class Meta:
        model = LinkStat
        fields = ('Referer', 'TimeClicked', 'AgentTypes', 'Languages')
class ClickTotalSerializer(serializers.ModelSerializer):
    """Daily aggregated click total for a link."""
    TotalClicked = serializers.Field('TotalClicked')

    class Meta:
        model = LinkClickTotal
        fields = ('TotalClicked', 'Date')
class DomainStatsSerializer(serializers.ModelSerializer):
    """All-time stats for a domain: link count, clicks, unique visitors."""
    Links = serializers.SerializerMethodField('get_num_links')
    Clicks = serializers.SerializerMethodField('get_num_clicks')
    UniqueVisitors = serializers.SerializerMethodField('get_num_unique_visitors')

    class Meta:
        model = SupportedDomain
        fields = ('Links', 'Clicks', 'UniqueVisitors',)

    def get_num_links(self, obj):
        return obj.Links.count()

    def get_num_clicks(self, obj):
        # NOTE(review): aggregate() yields None (not 0) when there are no
        # rows -- confirm consumers handle a null click count.
        return LinkClickTotal.objects.filter(Link__Domain=obj).aggregate(Sum('TotalClicked'))['TotalClicked__sum']

    def get_num_unique_visitors(self, obj):
        # Distinct IPs that ever clicked a link under this domain.
        return LinkStat.objects.filter(Link__Domain=obj).values('IpAddress').distinct().count()
class RefererStatsSerializer(serializers.Serializer):
    """Plain (non-model) serializer: clicks grouped by referer."""
    Referer = serializers.CharField(max_length=2000)
    Clicks = serializers.IntegerField()
class LinkCountryStatsSerializer(serializers.Serializer):
    """Plain (non-model) serializer: clicks grouped by country."""
    Country = serializers.CharField(max_length=100)
    CountryCode = serializers.CharField(max_length=3)
    Clicks = serializers.IntegerField()
class DomainStatsSerializer2(serializers.ModelSerializer):
    """24-hour rollup for a domain: clicks, countries, visitors, sources.

    The window cutoff was previously written out inline five times; it is
    hoisted into helpers so every stat uses the same definition.
    """
    TotalClicks = serializers.SerializerMethodField('get_num_clicks')
    CountriesReached = serializers.SerializerMethodField('get_unique_countries')
    UniqueVisitors = serializers.SerializerMethodField('get_num_unique_visitors')
    UniqueSources = serializers.SerializerMethodField('get_unique_sources')

    class Meta:
        model = SupportedDomain
        fields = ('TotalClicks', 'CountriesReached', 'UniqueVisitors', 'UniqueSources',)

    @staticmethod
    def _cutoff():
        # Start of the 24-hour window every stat below is restricted to.
        # NOTE(review): a date is compared against datetime fields below;
        # confirm this matches how TimeClicked is stored.
        return date.today() - timedelta(days=1)

    def _recent_stats(self, obj):
        # LinkStat rows for this domain inside the window, shared by the
        # distinct-count methods.
        return LinkStat.objects.filter(Link__Domain=obj).filter(TimeClicked__gte=self._cutoff())

    def get_num_clicks(self, obj):
        # May be None when there are no rows in the window (aggregate semantics).
        return LinkClickTotal.objects.filter(Link__Domain=obj).filter(Date__gte=self._cutoff()).aggregate(Sum('TotalClicked'))['TotalClicked__sum']

    def get_num_unique_visitors(self, obj):
        return self._recent_stats(obj).values('IpAddress').distinct().count()

    def get_unique_sources(self, obj):
        return self._recent_stats(obj).values('Referer').distinct().count()

    def get_unique_countries(self, obj):
        return self._recent_stats(obj).values('CountryCode').distinct().count()
class PendingUserSerializer(serializers.ModelSerializer):
    """Serialize a not-yet-registered user invitation with its company."""
    Company = CompanySerializer()

    class Meta:
        model = PendingUserRegistration
        fields = ('id', 'Email', 'Company')
class CompanyInfoSerializer(serializers.ModelSerializer):
    """Full company dashboard payload: users, domains, interstitials, invites."""
    Users = UserSerializer(many=True)
    Domains = DomainSerializer(many=True)
    Intersticials = InterstitialSerializer(many=True)
    PendingUsers = PendingUserSerializer(many=True)

    class Meta:
        model = Company
        fields = ('Name', 'PackageLevel', 'Users', 'Domains', 'Intersticials', 'PendingUsers')
class InterstitialStatSerializer(serializers.ModelSerializer):
    """One interstitial interaction event (action taken plus timings)."""
    Intersticial = serializers.PrimaryKeyRelatedField()
    Link = serializers.PrimaryKeyRelatedField()
    TimeGathered = serializers.DateTimeField(required=False)

    class Meta:
        model = InterstitialStat
        fields = ('Intersticial', 'Link', 'ActionTaken', 'TimeTaken', 'TimeGathered')
class OverallInterStatSerializer(serializers.ModelSerializer):
    """Daily aggregated interstitial stats for one interstitial."""
    Intersticial = serializers.PrimaryKeyRelatedField()

    class Meta:
        model = AggregateInterstitialStat
        fields = ('Intersticial', 'AdClicked', 'ButtonClicked', 'RedirectOcurred', 'AverageTimeTaken', 'Date')
class OverallInterStatAggregateSerializer(serializers.Serializer):
AdsClicked = serializers.IntegerField()
ButtonsClicked = serializers.IntegerField()
RedirectOcurred = serializers.IntegerField() | {
"repo_name": "BryceBrown/LinkstrDjango",
"path": "FrontEnd/serializers.py",
"copies": "1",
"size": "5957",
"license": "apache-2.0",
"hash": 8000516978742907000,
"line_mean": 33.8421052632,
"line_max": 161,
"alpha_frac": 0.7700184657,
"autogenerated": false,
"ratio": 3.661339889366933,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9813579346682207,
"avg_score": 0.023555801676945005,
"num_lines": 171
} |
from FrontEnd.models import *
from FrontEnd.serializers import *
from FrontEnd import utility
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.http import Http404, HttpResponse, HttpResponseRedirect
from rest_framework.authentication import SessionAuthentication, TokenAuthentication
from FrontEnd.permissions import *
from rest_framework import authentication, permissions, status, generics, mixins
from rest_framework.response import Response
from rest_framework.renderers import JSONRenderer
from rest_framework.views import APIView
from datetime import datetime, date, timedelta
from django.db.models import Count
from django.db.models import Q
from django.contrib.auth import authenticate
import json
import urllib2
from BeautifulSoup import BeautifulSoup
class DomainList(APIView):
    """List the caller's domains, or create/update one via POST."""

    def get(self, request, format=None):
        """Give back a list of domains that the user's company owns."""
        if not request.user.is_authenticated():
            return HttpResponse('', status=401)
        userExt = request.user.ExtUser
        domains = SupportedDomain.objects.filter(Q(Company=userExt.Company))
        serializer = DomainSerializer(domains, many=True)
        return Response(serializer.data)

    def post(self, request, format=None):
        """Create a new domain, or update an existing one when 'id' is sent."""
        if not request.user.is_authenticated():
            return HttpResponse('', status=401)
        dData = request.DATA.copy()
        # Force the company to the caller's own, regardless of payload.
        dData['Company'] = request.user.ExtUser.Company.id
        if 'id' in dData:
            # Update path: the domain must belong to the caller's company.
            domain = get_object_or_404(SupportedDomain, pk=dData['id'])
            if domain.Company != request.user.ExtUser.Company:
                return HttpResponse('', status=404)
            serializer = DomainSerializer(domain, dData)
            if serializer.is_valid():
                serializer.save()
                interData = dData['Intersticial']
                if interData:
                    inter = get_object_or_404(Intersticial, pk=interData['id'])
                    if inter.Company != domain.Company:
                        return HttpResponse('', status=401)
                    if domain.Intersticial != inter:
                        domain.Intersticial = inter
                        serializer = DomainSerializer(domain)
                else:
                    domain.Intersticial = None
                domain.save()
                return Response(serializer.data)
            else:
                return Response(serializer.errors)
        else:
            serializer = DomainSerializer(dData)
            if serializer.is_valid():
                # The shared shortener domain may not be claimed by customers.
                if 'goli.us' in dData['Domain']:
                    return HttpResponse('', status=401)
                serializer.save()
                interData = dData['Intersticial']
                if interData:
                    inter = get_object_or_404(Intersticial, pk=interData['id'])
                    nDomain = serializer.object
                    if inter.Company != nDomain.Company:
                        return HttpResponse('', status=401)
                    if nDomain.Intersticial != inter:
                        nDomain.Intersticial = inter
                        nDomain.save()
                        serializer = DomainSerializer(nDomain)
                # BUG FIX: was `serializer.Data` (AttributeError at runtime);
                # DRF serializers expose `.data`, as used everywhere else.
                return Response(serializer.data)
            else:
                return Response(serializer.errors)
class DomainNode(APIView):
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
domain = get_object_or_404(SupportedDomain, pk=pk)
if domain.Company != request.ExtUser.Company:
return HttpResponse('', status=401)
serializer = DomainSerializer(domain)
return Response(serializer.data)
class RedirectUrlsForDomain(APIView):
#TODO Sort by newest first
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
userExt = request.user.ExtUser
domain = get_object_or_404(SupportedDomain, pk=pk)
if userExt.Company != domain.Company and domain.Domain != 'goli.us':
return HttpResponse('', status=401)
links = RedirectLink.objects.filter(Domain=domain).filter(IsActive=True)
if domain.Domain == 'goli.us':
links = links.filter(User=userExt)
if 'q' in request.GET:
q = request.GET['q']
links = links.filter(Q(RedirectUrl__contains=q) | Q(LinkTitle__contains=q))
links = links.order_by('-TimeGenerated')
serializer = LinkSerializer(links, many=True)
return Response(serializer.data)
class SingleRedirectUrl(APIView):
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
link = get_object_or_404(RedirectLink, pk=pk)
if link.User.Company != request.user.ExtUser.Company:
return HttpResponse('', status=401)
serializer = LinkSerializer(link)
return Response(serializer.data)
def delete(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
link = get_object_or_404(RedirectLink, pk=pk)
if link.User.Company != request.user.ExtUser.Company:
return HttpResponse('', status=401)
link.IsActive = False
link.save()
return Response(status=status.HTTP_204_NO_CONTENT)
class RedirectUrl(APIView):
def get(self, request, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
links = RedirectLink.objects.filter(User=request.user.ExtUser)
serializer = LinkSerializer(links)
return Response(serializer.data)
def post(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
mData = request.DATA.copy()
hasDomain = False
userExt = request.user.ExtUser
#If there are no domains in the request
if SupportedDomain.objects.get(id=mData['Domain']).Company != userExt.Company:
return HttpResponse('', status=401)
#If the link has already been generated, give that object
prevLink = RedirectLink.objects.filter(RedirectUrl=mData['RedirectUrl']).filter(Domain__id=mData['Domain'])
if hasDomain:
prevLink = prevLink.filter(User=userExt)
if prevLink.count() > 0:
if not prevLink[0].IsActive:
prevLink[0].IsActive = True
prevLink[0].save()
serializer = LinkSerializer(prevLink[0])
return Response(serializer.data)
#Grab the ExtUserid and new unique URL Key
mData['User'] = userExt.id
if 'UrlKey' in mData:
prevLink = RedirectLink.objects.filter(Domain__id=mData['Domain']).filter(UrlKey=mData['UrlKey'])
if prevLink.count() > 0:
#Link has been used for this domain, return
return HttpResponse('', status=400)
else:
mData['UrlKey'] = utility.getUniqueRedirectKeyForDomain(mData['Domain'])
#Serialize and send the response
try:
title = BeautifulSoup(urllib2.urlopen(mData['RedirectUrl'])).title.string
mData['LinkTitle'] = title
except Exception, e:
mData['LinkTitle'] = ''
print "Error getting link title"
print e
serializer = LinkSerializer(data=mData)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
else:
return Response(serializer.errors, status=400)
def getLinkStats(self, request, pk, dateStart=date(1970,1,1), format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
link = get_object_or_404(RedirectLink, pk=pk)
if link.User != request.user.ExtUser:
return HttpResponse('', status=401)
stats = LinkClickTotal.objects.filter(Link=link).filter(Date__gte=dateStart)
clicks = LinkStat.objects.filter(Link=link).filter(TimeClicked__gte=dateStart)
countryClicks = clicks.values('Country','CountryCode').annotate(Clicks=Count('CountryCode'))
countrySerializer = LinkCountryStatsSerializer(countryClicks, many=True)
refererClicks = clicks.values('Referer').annotate(Clicks=Count('Referer'))
refererSerializer = RefererStatsSerializer(refererClicks, many=True)
agentTypes = LinkAgentType.objects.filter(Stat__Link=link).filter(Stat__TimeClicked__gte=dateStart)
browsers = agentTypes.values('Browser').annotate(count=Count('Browser'))
operatingSystems = agentTypes.values('OS').annotate(count=Count('OS'))
devices = agentTypes.values('Device').annotate(count=Count('Device'))
clickSerializer = ClickTotalSerializer(stats, many=True)
nDict = {
'Clicks': clickSerializer.data,
'Referers': refererSerializer.data,
'Countries': countrySerializer.data,
'Browsers': browsers,
'OS': operatingSystems,
'Devices': devices
}
return Response(nDict)
class LinkStatistics(APIView):
def get(self, request, pk, format=None):
return getLinkStats(self, request, pk, format=format)
class MonthLinkStatistics(APIView):
def get(self, request, pk, format=None):
return getLinkStats(self, request, pk, utility.monthdelta(datetime.now(), -1), format)
class ThreeMonthLinkStatistics(APIView):
def get(self, request, pk, format=None):
return getLinkStats(self, request, pk, utility.monthdelta(datetime.now(), -3), format)
class DomainStats(APIView):
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
domain = get_object_or_404(SupportedDomain, pk=pk)
if domain.Domain == 'goli.us':
statDict = {
'TotalClicks': self.get_num_clicks(domain, request.user.ExtUser),
'CountriesReached': self.get_unique_countries(domain, request.user.ExtUser),
'UniqueVisitors': self.get_num_unique_visitors(domain, request.user.ExtUser),
'UniqueSources': self.get_unique_sources(domain, request.user.ExtUser)
}
return Response(statDict)
else:
if request.user.ExtUser.Company != domain.Company:
return HttpResponse('', status=403)
serializer = DomainStatsSerializer2(domain)
return Response(serializer.data)
def get_num_clicks(self, obj, user):
return LinkClickTotal.objects.filter(Link__Domain=obj).filter(Link__User=user).filter(Date__gte=(date.today() - timedelta(days=1))).aggregate(Sum('TotalClicked'))['TotalClicked__sum']
def get_num_unique_visitors(self, obj, user):
return LinkStat.objects.filter(Link__Domain=obj).filter(Link__User=user).filter(TimeClicked__gte=(date.today() - timedelta(days=1))).values('IpAddress').distinct().count()
def get_unique_sources(self, obj, user):
return LinkStat.objects.filter(Link__Domain=obj).filter(Link__User=user).filter(TimeClicked__gte=(date.today() - timedelta(days=1))).values('Referer').distinct().count()
def get_unique_countries(self, obj, user):
return LinkStat.objects.filter(Link__Domain=obj).filter(Link__User=user).filter(TimeClicked__gte=(date.today() - timedelta(days=1))).values('CountryCode').distinct().count()
class CompanySerializer(APIView):
def get(self, request, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
company = request.user.ExtUser.Company
serializer = CompanyInfoSerializer(company)
return Response(serializer.data)
#just returns currently logged in user
class MeSerializer(APIView):
def get(self, request, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
serializer = UserSerializer(request.user.ExtUser)
return Response(serializer.data)
class InterstitialSingle(generics.RetrieveUpdateDestroyAPIView):
queryset = Intersticial.objects.all()
serializer_class = InterstitialSerializer
permission_classes = (IsCompanies, IsAuthenticated, )
def pre_save(self, obj):
obj.Company = self.request.user.ExtUser.Company
class InterstitialList(APIView):
def get(self, request, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
interstitials = Intersticial.objects.filter(Company=request.user.ExtUser.Company)
serializer = InterstitialSerializer(interstitials, many=True)
return Response(serializer.data)
def post(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
cpy = request.DATA.copy()
cpy['Company'] = request.user.ExtUser.Company.id
serializer = InterstitialSerializer(data=cpy)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
else:
return Response(serializer.errors, status=400)
class LoginSerializer(APIView):
def get(self, request, format=None):
if not ('username' in request.GET) or not ('password' in request.GET):
return HttpResponse('', status=403)
user = authenticate(username=request.GET['username'],password=request.GET['password'])
if user is not None:
token, created = Token.objects.get_or_create(user=user)
data = {
'token':token.key
}
return Response(data)
else:
return HttpResponse('', status=401)
class AnonUrl(APIView):
def post(self, request, format=None):
mData = request.DATA.copy()
mData['Domain'] = SupportedDomain.objects.get(Domain='t.goli.us').id
prevLink = RedirectLink.objects.filter(RedirectUrl=mData['RedirectUrl']).filter(Domain__id=mData['Domain'])
if prevLink.count() > 0:
serializer = LinkSerializer(prevLink[0])
return Response(serializer.data)
mData['User'] = User.objects.get(username='System').ExtUser.id
if 'UrlKey' in mData:
prevLink = RedirectLink.objects.filter(Domain__id=mData['Domain']).filter(UrlKey=mData['UrlKey'])
if prevLink.count() > 0:
#Link has been used for this domain, return
return HttpResponse('', status=400)
else:
mData['UrlKey'] = utility.getUniqueRedirectKeyForDomain(mData['Domain'])
title = ''
try:
title = BeautifulSoup(urllib2.urlopen(mData['RedirectUrl'])).title.string
except Exception, e:
print "Error getting link title"
print e
mData['LinkTitle'] = title
serializer = LinkSerializer(data=mData)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
else:
return Response(serializer.errors, status=400)
class InterstitialStatView(APIView):
def get(self, request, format=None):
mData = {}
mData['ActionTaken'] = request.GET['action_taken']
mData['Link'] = request.GET['linkid']
mData['Intersticial'] = request.GET['inter_id']
mData['TimeTaken'] = request.GET['time_taken']
serializer = InterstitialStatSerializer(data=mData)
if serializer.is_valid():
serializer.save()
stitial = serializer.object
#save aggregate stat for interstitiial
aggrStats = AggregateInterstitialStat.objects.filter(Intersticial=stitial.Intersticial).filter(Date=date.today())
if len(aggrStats) != 0:
aggrStat = aggrStats[0]
else:
aggrStat = AggregateInterstitialStat(Intersticial=stitial.Intersticial, Date=date.today())
aggrStat.incrementAction(stitial.ActionTaken, stitial.TimeTaken)
aggrStat.save()
return Response('', status=200)
else:
return Response(serializer.errors, status=400)
class DomainIntetstitialStat(APIView):
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
inter = get_object_or_404(Intersticial, pk=pk)
if inter.Company != request.user.ExtUser.Company:
return HttpResponse('', status=401)
stats = InterstitialStat.objects.filter(Intersticial=inter).filter(TimeGathered__gte=datetime.fromtimestamp(int(request.GET['from'])))
serializer = InterstitialStatSerializer(stats, many=True)
return Response(serializer.data)
class OverallInterstitialStat(APIView):
def get(self, request, pk, format=None):
if not request.user.is_authenticated():
return HttpResponse('', status=401)
inter = get_object_or_404(Intersticial, pk=pk)
if inter.Company != request.user.ExtUser.Company:
return HttpResponse('', status=401)
stats = AggregateInterstitialStat.objects.filter(Intersticial=inter).filter(Date__gte=datetime.fromtimestamp(int(request.GET['from'])))
aggrData = {
'AdsClicked': 0,
'ButtonsClicked': 0,
'RedirectOcurred': 0
}
for stat in stats:
aggrData['AdsClicked'] += stat.AdClicked
aggrData['ButtonsClicked'] += stat.ButtonClicked
aggrData['RedirectOcurred'] += stat.RedirectOcurred
serializer = OverallInterStatAggregateSerializer(aggrData)
return Response(serializer.data) | {
"repo_name": "BryceBrown/LinkstrDjango",
"path": "FrontEnd/api.py",
"copies": "1",
"size": "15613",
"license": "apache-2.0",
"hash": 8723561267864894000,
"line_mean": 35.3962703963,
"line_max": 185,
"alpha_frac": 0.73675783,
"autogenerated": false,
"ratio": 3.387502712085051,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9442032897726182,
"avg_score": 0.03644552887177376,
"num_lines": 429
} |
from frontend.stmt_inferrer import *
import ast
import time
r = open("tests/inference/example.py")
t = ast.parse(r.read())
r.close()
solver = z3_types.TypesSolver(t)
context = Context()
solver.infer_stubs(context, infer)
for stmt in t.body:
infer(stmt, context, solver)
solver.push()
def print_solver(z3solver):
printer = z3_types.z3printer
printer.set_pp_option('max_lines', 4000)
printer.set_pp_option('max_width', 120)
printer.set_pp_option('max_visited', 10000000)
printer.set_pp_option('max_depth', 1000000)
printer.set_pp_option('max_args', 512)
printer.pp(z3solver)
def print_context(ctx, ind=""):
for v in sorted(ctx.types_map):
z3_t = ctx.types_map[v]
if isinstance(z3_t, (Context, AnnotatedFunction)):
continue
print(ind + "{}: {}".format(v, model[z3_t]))
if ctx.has_context_in_children(v):
print_context(ctx.get_context_from_children(v), "\t" + ind)
if not ind:
print("---------------------------")
children = False
for child in ctx.children_contexts:
if ctx.name == "" and child.name == "":
children = True
print_context(child, "\t" + ind)
if not ind and children:
print("---------------------------")
start_time = time.time()
check = solver.optimize.check()
end_time = time.time()
if check == z3_types.unsat:
print("Check: unsat")
solver.check(solver.assertions_vars)
print(solver.unsat_core())
print([solver.assertions_errors[x] for x in solver.unsat_core()])
else:
model = solver.optimize.model()
print_context(context)
print("Ran in {} seconds".format(end_time - start_time))
| {
"repo_name": "gitsimon/spadup-lyra",
"path": "tests/inference_runner.py",
"copies": "1",
"size": "1693",
"license": "mpl-2.0",
"hash": 2065677412989154300,
"line_mean": 26.3064516129,
"line_max": 71,
"alpha_frac": 0.6107501477,
"autogenerated": false,
"ratio": 3.2557692307692307,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9366519378469231,
"avg_score": 0,
"num_lines": 62
} |
from frontend.z3_types import And, Or, Implies, Not
def add(left, right, result, types):
"""Constraints for the addition operation
Cases:
- Number_1 + Number_2 --> Stronger(Number_1, Number_2)
- Sequence + Sequence --> Sequence
Ex:
- 1 + 2.0
- [1, 2, 3] + [4]
- "string" + "string2"
TODO: Tuples addition
"""
return [
Or(
And(types.subtype(left, types.seq), left == right, left == result),
And(types.subtype(left, types.num), types.subtype(right, left), result == left),
And(types.subtype(right, types.num), types.subtype(left, right), result == right),
# result from list addition is a list with a supertype of operands' types
And(left == types.list(types.list_type(left)),
right == types.list(types.list_type(right)),
result == types.list(types.list_type(result)),
types.subtype(types.list_type(left), types.list_type(result)),
types.subtype(types.list_type(right), types.list_type(result)),
),
And(left == types.string, right == types.string, result == types.string)
),
]
def mult(left, right, result, types):
"""Constraints for the multiplication operation
Cases:
- Number_1 * Number_2 --> Stronger(Number_1, Number_2)
- Int * Sequence --> Sequence
- Sequence * Int --> Sequence
Ex:
- 1 * 2.0
- 3 * [1, 2]
- b"string" * 4
"""
return [
Or(
# multiplication of two booleans is an integer. Handle it separately
And(left == types.bool, right == types.bool, result == types.int),
And(Or(left != types.bool, right != types.bool),
Or(
And(types.subtype(left, types.seq), types.subtype(right, types.int), result == left),
And(types.subtype(left, types.int), types.subtype(right, types.seq), result == right),
And(types.subtype(left, types.num), types.subtype(right, left), result == left),
And(types.subtype(right, types.num), types.subtype(left, right), result == right),
)
)
)
]
def div(left, right, result, types):
"""Constraints for the division operation
Cases:
- Number_1 / Number_2 --> Stronger(types.float, Stronger(Number_1, Number2))
Ex:
- True / 7
- 3 / (1 + 2j)
"""
return [
And(types.subtype(left, types.num), types.subtype(right, types.num)),
Implies(Or(left == types.complex, right == types.complex), result == types.complex),
Implies(Not(Or(left == types.complex, right == types.complex)), result == types.float)
]
def arithmetic(left, right, result, is_mod, types):
"""Constraints for arithmetic operation
Cases:
- Number_1 (op) Number_2 --> Stronger(Number_1, Number_2)
- String formatting
Ex:
- 2 ** 3.0
- 3 - 4
- "Case #%i: %i" % (u, v)
"""
axioms = [
And(types.subtype(left, types.num), types.subtype(right, left), result == left),
And(types.subtype(right, types.num), types.subtype(left, right), result == right),
]
if is_mod:
axioms += [And(Or(left == types.string, left == types.bytes), result == left)]
return [Or(axioms)]
def bitwise(left, right, result, types):
"""Constraints for arithmetic operation
Cases:
- (Number_1: Int/Bool) (op) (Number_2: Int/Bool) --> Stronger(Number_1, Number_2)
Ex:
- 1 & 2
- True ^ False
"""
return arithmetic(left, right, result, False, types) + [
And(types.subtype(left, types.int), types.subtype(right, types.int))]
def bool_op(values, result, types):
"""Constrains for boolean operations (and/or)
The result is the supertype (or numerically stronger) of all operands.
Ex:
- 2 and str --> object
- False or 1 --> int
"""
return [types.subtype(x, result) for x in values]
def unary_invert(unary, types):
"""Constraints for the invert unary operation
Only subtypes for int are eligible for this operation (No floats)
Ex:
- ~231
"""
return [
types.subtype(unary, types.int)
]
def unary_other(unary, result, types):
"""Constraints for any unary operation except (~) and (not)
Cases:
- (op) Number --> Stronger(Int, Number)
Ex:
- -True
- +2.0
"""
return [
types.subtype(unary, types.num),
Implies(unary == types.bool, result == types.int),
Implies(unary != types.bool, result == unary)
]
def if_expr(a, b, result, types):
"""Constraints for if expressions
Cases:
- (a) if (TEST) else (b) --> Super(a, b)
"""
return [
types.subtype(a, result),
types.subtype(b, result)
]
def index(indexed, ind, result, types):
"""Constraints for index subscript
Cases:
- List[t] --> t
- str --> str
- bytes --> bytes
- Dict{t1: t2} --> t2
- Tuple(t1, t2, t3, ..., tn) --> Super(t1, t2, t3, ..., tn)
"""
# Tuple indexing
# Assert that 'indexed' can be a tuple of an arbitrary length, where the result is the super-type of its elements.
t = []
for cur_len in range(1, len(types.tuples)):
tuple_args = [getattr(types.type_sort, "tuple_{}_arg_{}".format(cur_len, i + 1))(indexed)
for i in range(cur_len)]
t.append(And(
indexed == types.tuples[cur_len](*tuple_args),
*[types.subtype(x, result) for x in tuple_args]
))
return [
Or(
[indexed == types.dict(ind, result),
And(types.subtype(ind, types.int), indexed == types.list(result)),
And(types.subtype(ind, types.int), indexed == types.string, result == types.string),
And(types.subtype(ind, types.int), indexed == types.bytes, result == types.bytes),
]
+ t
)
]
def slicing(lower, upper, step, sliced, result, types):
"""Constraints for slicing subscript
Cases:
- Sequence --> Sequence
"""
return [
And(types.subtype(lower, types.int), types.subtype(upper, types.int), types.subtype(step, types.int),
Or(
sliced == types.string,
sliced == types.bytes,
types.subtype(sliced, types.tuple),
sliced == types.list(types.list_type(sliced))
), result == sliced)
]
def generator(iterable, target, types):
"""Constraints for comprehension generators
Ex:
- [x for x in [1, 2]]
- [x for x in {1, 2}]
- [x for y in ["st", "st"] for x in y]
- [x for x in {1: "a", 2: "b"}]
"""
# TODO tuples
return [
Or(
iterable == types.list(target),
iterable == types.set(target),
And(iterable == types.string, target == types.string),
And(iterable == types.bytes, target == types.bytes),
iterable == types.dict(target, types.dict_value_type(iterable)),
)
]
def assignment(target, value, types):
"""Constraints for variable assignment.
The left hand side is either a super type or a numerically stronger type of the right hand side.
"""
return [
types.subtype(value, target)
]
def subscript_assignment(target, types):
"""Constraints for subscript assignment
Cases:
- Index assignment
- Slice assignment
strings, bytes and tuples are immutable objects. i.e., they don't support subscript assignments
"""
return [
target != types.string,
target != types.bytes,
Not(types.subtype(target, types.tuple))
]
def delete_subscript(indexed, types):
"""Constraints for subscript deletion
Prevent subscript deletion of tuples, strings and bytes (Immutable sequences)
"""
return [
Not(Or(
indexed == types.string,
indexed == types.bytes,
types.subtype(indexed, types.tuple)
))
]
def body(result, new, types):
"""Constraints for body statements
The body type is the super-type of all its statements, or none if no statement returns type.
"""
return [
Implies(new != types.none, result == new)
]
def control_flow(then, orelse, result, types):
"""Constraints for control-flow blocks (if/else, while, for)"""
# TODO numeric casting
return [
Implies(orelse == types.none, result == then),
Implies(orelse != types.none, And(
types.subtype(then, result),
types.subtype(orelse, result)
))
]
def for_loop(iterable, target, types):
"""Constraints for for-loop iterable and iteration target"""
return [
Or(
iterable == types.list(target),
iterable == types.set(target),
iterable == types.dict(target, types.dict_value_type(iterable)),
And(iterable == types.string, target == types.string),
And(iterable == types.bytes, target == types.bytes)
)
]
def try_except(then, orelse, final, result, types):
"""Constraints for try/except block"""
return [
types.subtype(then, result),
types.subtype(orelse, result),
types.subtype(final, result)
]
def one_type_instantiation(class_name, args, result, types):
"""Constraints for class instantiation, if the class name is known
:param class_name: The class to be instantiated
:param args: the types of the arguments passed to the class instantiation
:param result: The resulting instance from instantiation
:param types: Z3Types object for this inference program
"""
init_args_count = types.class_to_init_count[class_name]
# Get the instance accessor from the type_sort data type.
instance = getattr(types.type_sort, "instance")(types.all_types[class_name])
# Get the __init__ function of the this class
init_func = types.instance_attributes[class_name]["__init__"]
# Assert that it's a call to this __init__ function
# Get the default args count
defaults_accessor = getattr(types.type_sort, "func_{}_defaults_args".format(init_args_count))
default_count = defaults_accessor(init_func)
rem_args_count = init_args_count - len(args) - 1
rem_args = []
for i in range(rem_args_count):
arg_idx = len(args) + i + 2
# Get the default arg type
arg_accessor = getattr(types.type_sort, "func_{}_arg_{}".format(init_args_count, arg_idx))
rem_args.append(arg_accessor(init_func))
all_args = (instance,) + args + tuple(rem_args) + (types.none,) # The return type of __init__ is None
z3_func_args = (default_count,) + all_args
# Assert that it's a call to this __init__ function
return And(
result == instance,
init_func == types.funcs[len(args) + len(rem_args) + 1](z3_func_args), default_count >= rem_args_count)
def instance_axioms(called, args, result, types):
"""Constraints for class instantiation
A class instantiation corresponds to a normal function call to the __init__ function, where
the return type will be an instance of this class.
The called maybe of any user-defined type in the program, so the call is asserted
with the __init__ function of every call
"""
if len(args) + 1 >= len(types.funcs): # Instantiating a class with more number of args than the max possible number
return []
# Assert with __init__ function of all classes in the program
axioms = []
for t in types.all_types:
axioms.append(And(one_type_instantiation(t, args, result, types),
called == types.all_types[t]))
return axioms
def function_call_axioms(called, args, result, types):
"""Constraints for function calls
To support default arguments values, an axiom for every possible arguments length is added, provided that the
defaults count for the function matches the inferred one.
"""
axioms = []
for i in range(len(args), len(types.funcs)): # Only assert with functions with length >= call arguments length
rem_args = i - len(args) # The remaining arguments are expected to have default value in the func definition.
if rem_args > types.config.max_default_args:
break
rem_args_types = ()
for j in range(rem_args):
arg_idx = len(args) + j + 1
arg_accessor = getattr(types.type_sort, "func_{}_arg_{}".format(i, arg_idx)) # Get the default arg type
rem_args_types += (arg_accessor(called),)
# Get the default args count accessor
defaults_accessor = getattr(types.type_sort, "func_{}_defaults_args".format(i))
defaults_count = defaults_accessor(called)
# Add the axioms for function call, default args count, and arguments subtyping.
axioms.append(And(called == types.funcs[i]((defaults_accessor(called),) + tuple(args) + rem_args_types + (result,)),
defaults_count >= rem_args,
defaults_count <= types.config.max_default_args))
return axioms
def call(called, args, result, types):
"""Constraints for calls
Cases:
- Function call
- Class instantiation
"""
return [
Or(
function_call_axioms(called, args, result, types) + instance_axioms(called, args, result, types)
)
]
def attribute(instance, attr, result, types):
"""Constraints for attribute access
Assert with all classes having the attribute attr
"""
axioms = []
for t in types.all_types:
if attr in types.instance_attributes[t]:
# instance access. Ex: A().x
type_instance = getattr(types.type_sort, "instance")(types.all_types[t])
attr_type = types.instance_attributes[t][attr]
axioms.append(And(instance == type_instance, result == attr_type))
if attr in types.class_attributes[t]:
# class access. Ex: A.x
class_type = types.all_types[t]
attr_type = types.class_attributes[t][attr]
axioms.append(And(instance == class_type, result == attr_type))
return Or(axioms)
| {
"repo_name": "gitsimon/spadup-lyra",
"path": "frontend/z3_axioms.py",
"copies": "1",
"size": "14595",
"license": "mpl-2.0",
"hash": -6778523597855774000,
"line_mean": 31.578125,
"line_max": 124,
"alpha_frac": 0.58218568,
"autogenerated": false,
"ratio": 3.85193982581156,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.493412550581156,
"avg_score": null,
"num_lines": null
} |
from frontera.core.components import Middleware
from frontera.exceptions import NotConfigured
from frontera.utils.url import canonicalize_url
from frontera.utils.misc import load_object
class BaseFingerprintMiddleware(Middleware):
component_name = 'Base Fingerprint Middleware'
fingerprint_function_name = ''
def __init__(self, manager):
fingerprint_function_name = manager.settings.get(self.fingerprint_function_name, None)
if not fingerprint_function_name:
raise NotConfigured
self.fingerprint_function = load_object(fingerprint_function_name)
@classmethod
def from_manager(cls, manager):
return cls(manager)
def frontier_start(self):
pass
def frontier_stop(self):
pass
def add_seeds(self, seeds):
for seed in seeds:
self._add_fingerprint(seed)
return seeds
def page_crawled(self, response, links):
for link in links:
self._add_fingerprint(link)
return self._add_fingerprint(response)
def request_error(self, request, error):
return self._add_fingerprint(request)
def _add_fingerprint(self, obj):
raise NotImplementedError
class UrlFingerprintMiddleware(BaseFingerprintMiddleware):
"""
This :class:`Middleware <frontera.core.components.Middleware>` will add a ``fingerprint`` field for every
:attr:`Request.meta <frontera.core.models.Request.meta>` and
:attr:`Response.meta <frontera.core.models.Response.meta>` if is activated.
Fingerprint will be calculated from object ``URL``, using the function defined in
:setting:`URL_FINGERPRINT_FUNCTION` setting.
You can write your own fingerprint calculation function and use by changing this setting.
An example for a :class:`Request <frontera.core.models.Request>` object::
>>> request.url
'http//www.scrapinghub.com:8080'
>>> request.meta['fingerprint']
'60d846bc2969e9706829d5f1690f11dafb70ed18'
"""
component_name = 'URL Fingerprint Middleware'
fingerprint_function_name = 'URL_FINGERPRINT_FUNCTION'
def _get_fingerprint(self, url):
return self.fingerprint_function(canonicalize_url(url))
def _add_fingerprint(self, obj):
obj.meta['fingerprint'] = self._get_fingerprint(obj.url)
if 'redirect_urls' in obj.meta:
obj.meta['redirect_fingerprints'] = [self._get_fingerprint(url) for url in obj.meta['redirect_urls']]
return obj
class DomainFingerprintMiddleware(BaseFingerprintMiddleware):
"""
This :class:`Middleware <frontera.core.components.Middleware>` will add a ``fingerprint`` field for every
:attr:`Request.meta <frontera.core.models.Request.meta>` and
:attr:`Response.meta <frontera.core.models.Response.meta>` ``domain`` fields if is activated.
Fingerprint will be calculated from object ``URL``, using the function defined in
:setting:`DOMAIN_FINGERPRINT_FUNCTION` setting.
You can write your own fingerprint calculation function and use by changing this setting.
An example for a :class:`Request <frontera.core.models.Request>` object::
>>> request.url
'http//www.scrapinghub.com:8080'
>>> request.meta['domain']
{
"fingerprint": "5bab61eb53176449e25c2c82f172b82cb13ffb9d",
"name": "scrapinghub.com",
"netloc": "www.scrapinghub.com",
"scheme": "http",
"sld": "scrapinghub",
"subdomain": "www",
"tld": "com"
}
"""
component_name = 'Domain Fingerprint Middleware'
fingerprint_function_name = 'DOMAIN_FINGERPRINT_FUNCTION'
def _add_fingerprint(self, obj):
if 'domain' in obj.meta and 'name' in obj.meta['domain']:
obj.meta['domain']['fingerprint'] = self.fingerprint_function(obj.meta['domain']['name'])
if 'redirect_domains' in obj.meta:
for domain in obj.meta['redirect_domains']:
domain['fingerprint'] = self.fingerprint_function(domain['name'])
return obj
| {
"repo_name": "TeamHG-Memex/frontera",
"path": "frontera/contrib/middlewares/fingerprint.py",
"copies": "1",
"size": "4085",
"license": "bsd-3-clause",
"hash": -7756593196304651000,
"line_mean": 34.8333333333,
"line_max": 113,
"alpha_frac": 0.6702570379,
"autogenerated": false,
"ratio": 3.966019417475728,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001001891216197566,
"num_lines": 114
} |
from frontera.core import OverusedBuffer
from frontera.core.models import Request
r1 = Request('http://www.example.com')
r2 = Request('http://www.example.com/some/')
r3 = Request('htttp://www.example.com/some/page/')
r4 = Request('http://example.com')
r5 = Request('http://example.com/some/page')
r6 = Request('http://example1.com')
class TestOverusedBuffer(object):
    """Exercises OverusedBuffer: requests whose key is overused are held
    back and only released by later calls once the key is allowed again."""

    # Shared state consumed/produced by the callbacks handed to OverusedBuffer.
    requests = []
    logs = []

    def get_func(self, max_n_requests, **kwargs):
        # Serve up to max_n_requests items from the tail of the pending list.
        batch = []
        while len(batch) < max_n_requests and self.requests:
            batch.append(self.requests.pop())
        return batch

    def log_func(self, msg):
        self.logs.append(msg)

    def test(self):
        buf = OverusedBuffer(self.get_func, self.log_func)
        self.requests = [r1, r2, r3, r4, r5, r6]

        assert set(buf.get_next_requests(10, overused_keys=['www.example.com', 'example1.com'],
                                         key_type='domain')) == {r4, r5}
        assert set(self.logs) == {"Overused keys: ['www.example.com', 'example1.com']",
                                  "Pending: 0"}
        self.logs = []

        assert buf.get_next_requests(10, overused_keys=['www.example.com'],
                                     key_type='domain') == [r6]
        assert set(self.logs) == {"Overused keys: ['www.example.com']",
                                  "Pending: 4"}
        self.logs = []

        assert buf.get_next_requests(10, overused_keys=['www.example.com'],
                                     key_type='domain') == []
        assert set(self.logs) == {"Overused keys: ['www.example.com']",
                                  "Pending: 3"}
        self.logs = []

        # max_next_requests of 3 covers the "len(requests) == max_next_requests" case.
        assert set(buf.get_next_requests(3, overused_keys=['example.com'],
                                         key_type='domain')) == {r1, r2, r3}
        assert set(self.logs) == {"Overused keys: ['example.com']",
                                  "Pending: 3"}
        self.logs = []

        assert buf.get_next_requests(10, overused_keys=[], key_type='domain') == []
        assert set(self.logs) == {"Overused keys: []", "Pending: 0"}
| {
"repo_name": "TeamHG-Memex/frontera",
"path": "frontera/tests/test_core_overused_buffer.py",
"copies": "1",
"size": "2275",
"license": "bsd-3-clause",
"hash": -2745170546972579000,
"line_mean": 38.9122807018,
"line_max": 96,
"alpha_frac": 0.5195604396,
"autogenerated": false,
"ratio": 3.651685393258427,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4671245832858427,
"avg_score": null,
"num_lines": null
} |
from frontera.core.manager import FrontierManager
from converters import BaseRequestConverter, BaseResponseConverter
class FrontierManagerWrapper(object):
    """Adapter between a crawler framework and a FrontierManager.

    Subclasses must set ``request_converter`` and ``response_converter``
    (instances of BaseRequestConverter / BaseResponseConverter) before
    calling :meth:`start`.
    """

    def __init__(self, settings, manager=None):
        # ``manager`` allows injecting an alternative manager class
        # (e.g. for tests); defaults to FrontierManager.
        manager = manager or FrontierManager
        self.manager = manager.from_settings(settings)

    def start(self):
        """Validate the converters and start the underlying manager."""
        if not hasattr(self, 'request_converter'):
            raise NotImplementedError("Request converter should be instantiated in subclass")
        if not hasattr(self, 'response_converter'):
            raise NotImplementedError("Response converter should be instantiated in subclass")
        assert isinstance(self.request_converter, BaseRequestConverter), 'request_converter ' \
            'must be instance of BaseRequestConverter'
        assert isinstance(self.response_converter, BaseResponseConverter), 'response_converter ' \
            'must be instance of BaseResponseConverter'
        self.manager.start()

    def stop(self):
        self.manager.stop()

    def add_seeds(self, seeds):
        """Convert framework seeds to frontier requests and register them."""
        frontier_seeds = [self.request_converter.to_frontier(seed) for seed in seeds]
        self.manager.add_seeds(seeds=frontier_seeds)

    def get_next_requests(self, max_next_requests=0, **kwargs):
        """Fetch the next frontier requests, converted back to framework requests."""
        frontier_requests = self.manager.get_next_requests(max_next_requests=max_next_requests, **kwargs)
        return [self.request_converter.from_frontier(frontier_request) for frontier_request in frontier_requests]

    def page_crawled(self, response, links=None):
        """Notify the manager that a page was crawled.

        Fix: ``links`` defaults to None but was iterated unconditionally,
        raising TypeError whenever it was omitted; None now means "no links".
        """
        frontier_response = self.response_converter.to_frontier(response)
        frontier_links = [self.request_converter.to_frontier(link) for link in links or []]
        self.manager.page_crawled(response=frontier_response,
                                  links=frontier_links)

    def request_error(self, request, error):
        self.manager.request_error(request=self.request_converter.to_frontier(request),
                                   error=error)

    def finished(self):
        return self.manager.finished
| {
"repo_name": "TeamHG-Memex/frontera",
"path": "frontera/utils/managers.py",
"copies": "1",
"size": "2158",
"license": "bsd-3-clause",
"hash": 9215412267348764000,
"line_mean": 48.0454545455,
"line_max": 118,
"alpha_frac": 0.6529193698,
"autogenerated": false,
"ratio": 4.316,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002534810996250868,
"num_lines": 44
} |
from frontera.core.manager import FrontierManager
from converters import BaseRequestConverter, BaseResponseConverter
class FrontierManagerWrapper(object):
    """Adapter between a crawler framework and a FrontierManager.

    Subclasses must set ``request_converter`` and ``response_converter``
    (instances of BaseRequestConverter / BaseResponseConverter) before
    calling :meth:`start`.
    """

    def __init__(self, settings):
        self.manager = FrontierManager.from_settings(settings)

    def start(self):
        """Validate the converters and start the underlying manager."""
        if not hasattr(self, 'request_converter'):
            raise NotImplementedError("Request converter should be instantiated in subclass")
        if not hasattr(self, 'response_converter'):
            raise NotImplementedError("Response converter should be instantiated in subclass")
        assert isinstance(self.request_converter, BaseRequestConverter), 'request_converter ' \
            'must be instance of BaseRequestConverter'
        assert isinstance(self.response_converter, BaseResponseConverter), 'response_converter ' \
            'must be instance of BaseResponseConverter'
        self.manager.start()

    def stop(self):
        self.manager.stop()

    def add_seeds(self, seeds):
        """Convert framework seeds to frontier requests and register them."""
        frontier_seeds = [self.request_converter.to_frontier(seed) for seed in seeds]
        self.manager.add_seeds(seeds=frontier_seeds)

    def get_next_requests(self, max_next_requests=0, **kwargs):
        """Fetch the next frontier requests, converted back to framework requests."""
        frontier_requests = self.manager.get_next_requests(max_next_requests=max_next_requests, **kwargs)
        return [self.request_converter.from_frontier(frontier_request) for frontier_request in frontier_requests]

    def page_crawled(self, response, links=None):
        """Notify the manager that a page was crawled.

        Fix: ``links`` defaults to None but was iterated unconditionally,
        raising TypeError whenever it was omitted; None now means "no links".
        """
        frontier_response = self.response_converter.to_frontier(response)
        frontier_links = [self.request_converter.to_frontier(link) for link in links or []]
        self.manager.page_crawled(response=frontier_response,
                                  links=frontier_links)

    def request_error(self, request, error):
        self.manager.request_error(request=self.request_converter.to_frontier(request),
                                   error=error)
| {
"repo_name": "rahulsharma1991/frontera",
"path": "frontera/utils/managers.py",
"copies": "5",
"size": "2045",
"license": "bsd-3-clause",
"hash": -5062291660967596000,
"line_mean": 50.125,
"line_max": 118,
"alpha_frac": 0.6528117359,
"autogenerated": false,
"ratio": 4.287211740041928,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.002788292095875955,
"num_lines": 40
} |
from frontera.settings import Settings
class FakeFrontierManager(object):
    """In-memory stand-in for FrontierManager used by tests: records every
    interaction and serves queued requests from the tail of the queue."""

    def __init__(self, settings):
        self.settings = settings
        self.auto_start = settings.get('AUTO_START')
        self.iteration = 0
        self.finished = False
        self._started = True
        self._stopped = False
        # Recorded interactions, inspected by the tests.
        self.seeds = []
        self.requests = []
        self.links = []
        self.responses = []
        self.errors = []
        self.get_next_requests_kwargs = []

    @classmethod
    def from_settings(cls, settings=None):
        return FakeFrontierManager(Settings.object_from(settings))

    def start(self):
        self._started = True

    def stop(self):
        self._stopped = True

    def add_seeds(self, seeds):
        self.seeds.extend(seeds)

    def put_requests(self, requests):
        self.requests.extend(requests)

    def get_next_requests(self, max_next_requests=0, **kwargs):
        self.get_next_requests_kwargs.append(kwargs)
        # Zero means "use the configured batch size".
        limit = max_next_requests or self.settings.get('MAX_NEXT_REQUESTS')
        batch = []
        while len(batch) < limit and self.requests:
            batch.append(self.requests.pop())
        self.iteration += 1
        return batch

    def page_crawled(self, response, links=None):
        self.links.extend(links or [])
        self.responses.append(response)

    def request_error(self, request, error):
        self.errors.append((request, error))
| {
"repo_name": "TeamHG-Memex/frontera",
"path": "frontera/tests/mocks/frontier_manager.py",
"copies": "1",
"size": "1615",
"license": "bsd-3-clause",
"hash": -2373274256556816400,
"line_mean": 26.8448275862,
"line_max": 87,
"alpha_frac": 0.5913312693,
"autogenerated": false,
"ratio": 4.0375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5128831269299999,
"avg_score": null,
"num_lines": null
} |
from fronter import *
class RoomInfo(Tool):
    """Fronter room-info tool: list, view, edit and delete room messages."""

    class Message:
        """A single room message plus the action URLs available for it."""

        def __init__(self, title, mid, url=None):
            self.title = title
            self.id = mid
            self.url = url
            self.text = ''
            self.xml = None   # parsed content node, filled lazily by read()
            self.fname = ''   # path of the raw-html dump, filled lazily
            self.menu = {'get': url}

        def str(self):
            # Rendered body plus a pointer to the raw html dump on disk.
            return '\n' + self.text + col('\n\nraw html: ', c.HL) + 'file://' + self.fname

    def __init__(self, client, url):
        super(RoomInfo, self).__init__()
        self.client = client
        self.PATH = client.TARGET + 'prjframe/index.phtml'
        self.url = url
        self.get_messages()
        self.commands['ls'] = Tool.Command('ls', self.print_messages, '', 'list messages')
        self.commands['get'] = Tool.Command('get', self.view, '<index>', 'print message')
        #self.commands['post'] = Tool.Command('post', self.new, '', 'new message')
        self.commands['put'] = Tool.Command('put', self.edit, '<index>', 'edit message')
        self.commands['del'] = Tool.Command('del', self.delete, '<index>', 'delete message')

    def get_messages(self):
        """Scrape the room's news page and (re)build self.messages."""
        xml = self.get_xml(self.url + '&show_news_all=1')
        msg_tab = xml.xpath('//table[contains(@class, "news-element")]')[-1]
        mids = msg_tab.xpath('//td[@class="content-header"]/a')
        headers = msg_tab.xpath('.//div[@class="content-header2"]')
        actions = msg_tab.xpath('.//div[@class="righttab2"]')
        self.messages = []
        title = mid = ''
        for header, mid in zip(headers, mids):
            try:
                title = header.text_content().split('\n', 1)[0][:50]
                mid = mid.get('name').replace('selected_news', '')
                url = self.PATH + '?show_news_all=&expand=%s#selected_news%s' % (mid, mid)
                msg = RoomInfo.Message(title, mid, url)
            except IndexError:
                # Header without the expected structure: reuse the last
                # title/mid and treat the title itself as the message text.
                msg = RoomInfo.Message(title, mid)
                msg.text = title
            msg.fname = html.to_file(header, add_meta=True)
            self.messages.append(msg)
        if actions:
            # Edit/delete controls are present, so expose those actions.
            for msg in self.messages:
                msg.menu['put'] = self.PATH + '?add_new_news=1&edit_id=' + msg.id
                msg.menu['del'] = self.PATH + '?news_save=1&del_id=' + msg.id
        # Newest first.
        self.messages = self.messages[::-1]

    def print_messages(self):
        for idx, msg in enumerate(self.messages):
            print(col('[%-3i] ' % (idx + 1), c.HL) +
                  '%-55s %s' % (msg.title + ' ... ', ', '.join(msg.menu)))

    def _get_msg(self, idx):
        # 1-based user index; reject values that would wrap around to the end.
        idx = int(idx) - 1
        if idx < 0:
            raise IndexError
        return self.messages[idx]

    def new(self):
        pass

    def delete(self, idx):
        msg = self._get_msg(idx)
        if 'del' not in msg.menu:
            # Fix: the color argument belongs inside col(); it was being
            # passed to print() as a second positional argument.
            print(col(' !! not authorized to delete', c.ERR))
            return
        self.get(msg.menu['del'])
        self.get_messages()

    def edit(self, idx):
        msg = self._get_msg(idx)
        if 'put' not in msg.menu:
            # Fix: color argument moved inside col() (see delete()).
            print(col(' !! not authorized to edit', c.ERR))
            return
        if not msg.fname:
            self.read(msg)
        is_mod = txt.edit(msg.fname)
        if is_mod:
            xml = self.get_xml(msg.menu['put'])
            payload = self.get_form(xml)
            # Read new message
            with open(msg.fname, 'rb') as f:
                msg.text = f.read()  # NOTE(review): bytes under py3 — presumably a py2 target; confirm
            payload['body'] = msg.text
            payload['form_is_changed'] = 1
            payload['news_edit'] = msg.id
            self.post(self.PATH, payload)
            # Refresh and print
            msg.xml = None
            msg.text = ''
            self.view(idx)
            msg.title = msg.text.split('\n', 1)[0][:50]

    def read(self, msg):
        """Fetch and cache the message's content node and raw-html dump."""
        xml = self.get_xml(msg.menu['get'])
        link = xml.xpath('//a[@name="selected_news%s"]' % msg.id)[0]
        msg.xml = link.getnext().xpath('.//div[@class="content-header2"]')[0]
        msg.fname = html.to_file(msg.xml, add_meta=True)

    def view(self, idx):
        msg = self._get_msg(idx)
        if msg.text:
            print(msg.str())
            return
        if msg.xml is None:
            self.read(msg)
        # Some short messages are just plain text
        msg.text = msg.xml.text or ''
        # Parse HTML
        # NOTE(review): the assignment below replaces the plain-text value
        # captured above — confirm whether '+=' was intended here.
        msg.text = html.to_text(msg.xml)
        print(msg.str())
| {
"repo_name": "kjempelodott/ananas",
"path": "fronter/roominfo.py",
"copies": "1",
"size": "4499",
"license": "mit",
"hash": 1287192356156944000,
"line_mean": 27.8397435897,
"line_max": 93,
"alpha_frac": 0.4945543454,
"autogenerated": false,
"ratio": 3.570634920634921,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4565189266034921,
"avg_score": null,
"num_lines": null
} |
from fronter import *
unescape = HTMLParser().unescape
def to_file(xml, add_meta=False):
    """Dump an XML/HTML node to a temporary .html file and return its path."""
    fd, fname = mkstemp(prefix='fronter_', suffix='.html')
    parts = []
    if add_meta:
        # UTF-8 charset declaration so browsers render the fragment correctly.
        parts.append('<meta http-equiv="Content-Type" content="text/html; charset=utf-8">\n')
    markup = unescape(tostring(xml).decode('utf-8'))
    # Strip layout-only <div> wrappers.
    parts.append(re.sub('</?div.*?>', '', markup))
    with os.fdopen(fd, 'wb') as f:
        f.write(''.join(parts).encode('utf-8'))
    return fname
def to_text(xml):
    """Render an XML/HTML node's children as plain text.

    Tables are drawn with ASCII borders, <ul>/<ol> become bullet/numbered
    lists, and everything else is flattened via text_content().
    """
    content = ''
    for elem in xml:
        if elem.tag == 'table':
            rows = []
            try:
                for tr in elem:
                    rows.append([td.text_content().strip() for td in tr])
                # Column widths sized to the longest cell in each column.
                widths = list(map(max, [map(len, clm) for clm in zip(*rows)]))
                pieces = ['%-' + str(w + 2) + 's' for w in widths]
                table_content = '\n' + '-' * (sum(widths) + 4 + 2*len(widths)) + '\n'
                for row in rows:
                    table_content += '| ' + ''.join(pieces) % tuple(row) + ' |\n'
                table_content += '-' * (sum(widths) + 4 + 2*len(widths)) + '\n'
                content += table_content
            except Exception:
                # Fix: narrowed from a bare except so KeyboardInterrupt and
                # SystemExit are no longer swallowed; ragged/empty tables
                # (zip/format errors) still land here.
                content += col('!! badass table', c.ERR)
        elif elem.tag == 'ul':
            content += '\n'
            for li in elem:
                content += ' * ' + li.text_content() + '\n'
            content += '\n'
        elif elem.tag == 'ol':
            content += '\n'
            for i, li in enumerate(elem):
                content += ' %i. ' % (i + 1) + li.text_content() + '\n'
            content += '\n'
        else:
            content += elem.text_content()
        # Trailing text after <br> etc ...
        content += elem.tail or ''
    return wrap(content)
| {
"repo_name": "kjempelodott/ananas",
"path": "fronter/html.py",
"copies": "1",
"size": "1777",
"license": "mit",
"hash": -7291342526370586000,
"line_mean": 29.1186440678,
"line_max": 85,
"alpha_frac": 0.4575126618,
"autogenerated": false,
"ratio": 3.648870636550308,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46063832983503084,
"avg_score": null,
"num_lines": null
} |
from frontpage.models import Article, Profile, Media, ArticleGroup
from frontpage.uitools import body
from django.http import HttpRequest
from frontpage.management.magic import get_current_user, get_article_pcs_free
from frontpage.management.form import Form, PlainText, SubmitButton, NumberField, TextField, CheckBox, CheckEnum
from frontpage.management.dashboard_page import render_error_panel
def generate_edit_link(a: Article):
    """Build the admin edit URL for the given article."""
    return "/admin/articles/edit?article_id={0}".format(a.pk)
def render_vs_status(b: bool):
    """Render a visibility flag: plain text when visible, an icon otherwise."""
    return 'Visible' if b else '<img class="icon" src="/staticfiles/frontpage/error.png" alt="Invisible"/>'
def render_objects_form(request: HttpRequest):
    """Render the collapsible filter form (objects per page, description
    filter, visible-only checkbox) used by the article admin pages."""
    # Current filter state, falling back to defaults when absent from the query string.
    items_per_page = int(request.GET["objects"]) if request.GET.get('objects') else 50
    name_filter = str(request.GET["namefilter"]) if request.GET.get('namefilter') else ""
    only_visible = bool(request.GET.get("onlyvisible"))
    f: Form = Form(request.path)
    f.method = "get"
    f.add(PlainText('<details><summary class="button">Toggle filter</summary><h4>Filter:</h4><br />Objects per page: '))
    f.add(NumberField(name="objects", button_text=str(items_per_page)))
    f.add(PlainText("Description: "))
    f.add(TextField(name="namefilter", button_text=name_filter, required=False))
    f.add(CheckBox(text="Display only visible entries: ", name="onlyvisible", checked=CheckEnum.get_state(only_visible)))
    f.add(SubmitButton(button_text="Sort"))
    f.add(PlainText("</details>"))
    return f.render_html(request)
def get_group_prices(g: ArticleGroup):
    """Collect the distinct prices of a group's articles.

    Returns a (rendered_prices, all_visible) tuple: a comma-separated string
    of rendered prices and a flag that is True only if every article in the
    group is visible.
    """
    prices = []
    visible = True
    for article in Article.objects.all().filter(group=g):
        if article.price not in prices:
            prices.append(article.price)
        visible &= article.visible
    rendered = ', '.join(body.render_price(p) for p in prices)
    return (rendered, visible)
def render_group_list(request: HttpRequest, u: Profile):
    """Render the article-group table; editors (rights > 1) additionally get
    an add-group button, edit links and the group-id column."""
    can_edit = u.rights > 1
    parts = ['<h4>Article groups:</h4>']
    if can_edit:
        parts.append('<a href="/admin/articles/editgroup" class="button">Add Group</a><br />')
    groups = ArticleGroup.objects.all()
    if request.GET.get("namefilter"):
        groups = groups.filter(group_name=str(request.GET["namefilter"]))
    parts.append('<br /><table><tr>')
    if can_edit:
        parts.append('<th> Edit </th><th> Group ID </th>')
    parts.append('<th> Article </th><th> Preview </th><th> Price </th><th> Visible </th></tr>')
    for g in groups:
        parts.append('<tr>')
        if can_edit:
            parts.append('<td><a href="/admin/articles/editgroup?gid=' + str(g.id) +
                         '" ><img src="/staticfiles/frontpage/edit.png" class="button-img" />' +
                         '</a></td><td>' + str(g.id) + '</td>')
        parts.append('<td>' + str(g.group_name) + '</td><td>' + body.render_image(g.group_flash_image, cssclass="icon"))
        prices, group_visible = get_group_prices(g)
        parts.append('</td><td>' + prices + '</td><td>' + str(group_visible) + '</td></tr>')
    parts.append('</table>')
    return ''.join(parts)
def render_alone_article_list(request: HttpRequest, u: Profile):
    """Render the paginated table of stand-alone (group-less) articles.

    Fixes:
    - ``max_page`` is now computed with integer ceiling division instead of
      true division, so the "page X of Y" footer shows whole pages.
    - Page numbering is 1-based: ``start_range`` previously began at
      ``1 + page * items_per_page``, which skipped the first page of records.

    NOTE(review): pagination filters on a pk range, which shows short pages
    when pks have gaps — confirm whether offset/limit slicing was intended.
    """
    # TODO add method to select how many posts to display
    # TODO make layout more fancy
    page = 1
    items_per_page = 50
    if request.GET.get('objects'):
        items_per_page = int(request.GET["objects"])
    prefilter = Article.objects.all().filter(group=None).filter(underConstruction=False)
    if request.GET.get("namefilter"):
        prefilter = prefilter.filter(description=str(request.GET.get("namefilter")))
    if request.GET.get("onlyvisible"):
        prefilter = prefilter.filter(visible=True)
    total_items = prefilter.count()
    max_page = -(-total_items // items_per_page)  # ceiling division, stays an int
    if max_page < 1:
        max_page = 1
    if request.GET.get('page'):
        page = int(request.GET["page"])
    if page > max_page:
        page = max_page
    # Page 1 covers pks 1..items_per_page, page 2 the next batch, and so on.
    start_range = 1 + (page - 1) * items_per_page
    if start_range > total_items:
        start_range = 0
    end_range = page * items_per_page
    a = '<h4>Stand alone Articles:</h4>'
    if u.rights > 1:
        a += '<a href="/admin/articles/edit" class="button">Add a new Article</a><br/>'
    a += '<br /><table><tr>'
    if u.rights > 1:
        a += '<th>Edit</th>'
    a += '<th> Article ID </th><th> Preview </th><th> Description </th><th> Size </th>' \
         '<th> Price </th><th> Pcs left </th>'
    if u.rights > 1:
        a += '<th> Visibility </th>'
    a += '</tr>'
    objects = prefilter.filter(pk__range=(start_range, end_range))
    for article in objects:
        a += '<tr>'
        if u.rights > 1:
            a += '<td><a href="' + generate_edit_link(article) + '">' \
                 '<img src="/staticfiles/frontpage/edit.png" class="button-img"/></a></td>'
        a += '<td>' + str(article.pk) + "</td><td>" + body.render_image(article.flashImage, cssclass="icon")\
             + "</td><td>" + article.description + "</td><td>"\
             + article.size + "</td><td>" + body.render_price(article.price) + "</td><td>"\
             + str(get_article_pcs_free(article)) + "</td>"
        if u.rights > 1:
            a += "<td>" + render_vs_status(article.visible) + "</td>"
        a += "</tr>"
    a += '</table><br />'
    if page > 1:
        a += '<a href="' + request.path + '?page=' + str(page - 1) + '&objects=' + str(items_per_page) \
             + '" class="button"> Previous page </a>'
    if page < max_page:
        a += '<a href="' + request.path + '?page=' + str(page + 1) + '&objects=' + str(items_per_page) \
             + '" class="button"> Next page </a>'
    a += '<center>displaying page ' + str(page) + ' of ' + str(max_page) + ' total pages.</center>'
    return a
def render_article_page(request: HttpRequest):
    """Compose the admin article page: error panel, filter form (for editors
    or when ?showfilter is set), group list and stand-alone article list."""
    show_filter: bool = bool(request.GET.get("showfilter"))
    u: Profile = get_current_user(request)
    parts = [render_error_panel(request),
             '<div class="admin-popup w3-row w3-padding-64 w3-twothird w3-container"><h3>Articles:</h3><br/>']
    if u.rights > 1 or show_filter:
        parts.append(render_objects_form(request))
    parts.append(render_group_list(request, u))
    parts.append(render_alone_article_list(request, u))
    if u.rights > 1 or show_filter:
        # The filter form is repeated below the tables for convenience.
        parts.append(render_objects_form(request))
    parts.append('</div>')
    return ''.join(parts)
| {
"repo_name": "Technikradio/C3FOCSite",
"path": "c3shop/frontpage/management/articletools/article_page.py",
"copies": "1",
"size": "6381",
"license": "bsd-3-clause",
"hash": 4196826863107259400,
"line_mean": 39.3860759494,
"line_max": 121,
"alpha_frac": 0.5840777308,
"autogenerated": false,
"ratio": 3.330375782881002,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4414453513681002,
"avg_score": null,
"num_lines": null
} |
from frontynium.exceptions import ObjectMappingNotFound, InvalidObjectMapping
from frontynium.finder import Finder, ExpressionBuilder
from types import FunctionType
class Page(object):
    """Implementation of the PageObject pattern
    (https://code.google.com/p/selenium/wiki/PageObjects).

    Provides tools to manipulate a web UI using selenium. Not meant to be
    used directly: subclass it, call Page.__init__, then fill
    ``self._mapped_objects`` with control names mapped to finder
    expressions, e.g.::

        class MyAwesomePage(Page):
            def __init__(self, webdriver):
                Page.__init__(self, webdriver)
                self._mapped_objects = {
                    'input': self._finder.by_css("input#test-123"),
                    'validate_button': self._finder.by_css("div.button"),
                }
    """

    def __init__(self, root_node):
        """Default constructor.

        :param root_node: instance of selenium's webdriver or webelement
        """
        self._root_node = root_node
        self._finder = Finder(self._root_node)
        self._mapped_objects = {}
        # Fix: validate the mapping *values* (the finder expressions), not
        # the key strings the previous code iterated.
        # NOTE(review): this runs on the dict that was just set to {} above,
        # so it only guards subclasses assigning the mapping before calling
        # Page.__init__ — confirm the intended validation point.
        for obj in self._mapped_objects.values():
            if not isinstance(obj, ExpressionBuilder):
                raise InvalidObjectMapping("You must use the methods available in the finder.")

    # Preventing a var = Page(webdriver) in a test code as _mapped_objects
    # needs to be defined correctly by a subclass.
    def __new__(cls, *args, **kwargs):
        if cls is Page:
            raise TypeError("This class is not meant to be instantiated directly but subclassed.")
        # Fix: object.__new__ must not receive the constructor arguments —
        # forwarding them raises TypeError on Python 3 for every subclass.
        return object.__new__(cls)

    def detect_objects(self, object_name, *args, **kwargs):
        """Trigger the detection of objects.

        :param object_name: the name of the mapping as defined in self._mapped_objects
        :returns: WebElement or list<WebElement>; None when the built
            mapping is not a plain function
        """
        element = None
        try:
            mapping = self._mapped_objects[object_name].build()
            if type(mapping) == FunctionType:
                element = mapping(*args, **kwargs)
        except KeyError:
            raise ObjectMappingNotFound("Object {0} has not been found in the objects dictionary.".format(object_name))
        return element

    def click_on(self, object_name, *args, **kwargs):
        """Click on an object defined in the mapping.

        :param object_name: the name of the mapping as defined in self._mapped_objects
        :returns: the current instance of Page
        """
        element = self.detect_objects(object_name, *args, **kwargs)
        element.click()
        return self

    def set_value_into(self, object_name, value, clear_before_use=False, *args, **kwargs):
        """Set a value into a mapped field.

        :param object_name: the name of the mapping as defined in self._mapped_objects
        :param value: the string that will be input into the control
        :param clear_before_use: empty the field before use
        :returns: the current instance of Page
        """
        element = self.detect_objects(object_name, *args, **kwargs)
        if type(element) == list:
            # Several matches: type into the first one.
            element = element[0]
        if clear_before_use:
            element.clear()
        element.send_keys(value)
        return self

    @property
    def root_node(self):
        return self._root_node

    @property
    def objects(self):
        return self._mapped_objects

    @objects.setter
    def objects(self, value):
        self._mapped_objects = value
def gettable(*args):
    """Class decorator adding a get_<name>() accessor exposing the
    WebElement(s) for each named mapping.

    :param args: str, each str being the name of a mapping inside Page._mapped_objects

    Fix: each accessor now binds its mapping name through a factory
    closure; previously the loop variable was captured late, so every
    generated getter looked up the *last* name.
    """
    def inner(cls):
        def make_getter(name):
            def getter(cls, *sargs, **skwargs):
                return cls.detect_objects(name, *sargs, **skwargs)
            return getter
        for arg in args:
            setattr(cls, 'get_' + arg, make_getter(arg))
        return cls
    return inner
def settable(*args):
    """Class decorator adding a set_<name>(value, clear_before_use=False)
    mutator that types into (the first element of) each named mapping.

    Fix: each setter now binds its mapping name via a factory closure;
    previously the loop variable was captured late, so every generated
    setter targeted the *last* name.
    """
    def inner(cls):
        def make_setter(name):
            def setter(cls, value, clear_before_use=False, *sargs, **skwargs):
                obj = cls.detect_objects(name, *sargs, **skwargs)
                if type(obj) == list:
                    obj = obj[0]
                if clear_before_use:
                    obj.clear()
                obj.send_keys(value)
                return cls
            return setter
        for arg in args:
            setattr(cls, 'set_' + arg, make_setter(arg))
        return cls
    return inner
def clickable(*args):
    """Class decorator adding a click_on_<name>() helper for each named mapping.

    Fix: each helper now binds its mapping name via a factory closure;
    previously the loop variable was captured late, so every generated
    helper clicked the *last* name.
    """
    def inner(cls):
        def make_clicker(name):
            def clicker(cls):
                cls.click_on(name)
                return cls
            return clicker
        for arg in args:
            setattr(cls, 'click_on_' + arg, make_clicker(arg))
        return cls
    return inner
| {
"repo_name": "Ketouem/python-frontynium",
"path": "frontynium/page.py",
"copies": "1",
"size": "4762",
"license": "mit",
"hash": 2349407716269278700,
"line_mean": 33.2589928058,
"line_max": 119,
"alpha_frac": 0.5999580008,
"autogenerated": false,
"ratio": 4.274685816876122,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0011138695990463316,
"num_lines": 139
} |
from frostsynth import epsilon, linspace
from frostsynth.polytable import CubicTable
def zeros(f, resolution=0.01, iterations=100):
    """Generator yielding successive zeros of f on [0, inf).

    Scans forward in steps of `resolution` looking for sign changes, then
    refines each bracket with `iterations` rounds of bisection.
    NOTE(review): a root the scan grid steps over without a sign change
    (e.g. an even-multiplicity zero) is missed.
    """
    t0 = 0
    f0 = f(t0)
    # Special-case an (approximate) zero at t = 0, which the sign-change
    # scan below would not detect.
    if abs(f0) < epsilon:
        yield 0
    t1 = resolution
    f1 = f(t1)
    while True:
        # Advance until [t0, t1] brackets a sign change.
        while f0 * f1 >= 0:
            t0 = t1
            f0 = f1
            t1 += resolution
            f1 = f(t1)
        # Bisection: keep the half whose endpoints differ in sign.
        for _ in range(iterations):
            midpoint = (t1 + t0) * 0.5
            fm = f(midpoint)
            if f0 * fm < 0:
                t1 = midpoint
                f1 = fm
            else:
                t0 = midpoint
                f0 = fm
        yield (t1 + t0) * 0.5
        # Step past the bracket and resume scanning.
        t0 = t1
        f0 = f1
        t1 += resolution
        f1 = f(t1)
def optima(f, lattice_constant=epsilon, resolution=0.01, iterations=100):
    """Yield approximate local optima of f as zeros of a symmetric difference."""
    half = lattice_constant * 0.5
    def central_difference(x):
        return f(x + half) - f(x - half)
    return zeros(central_difference, resolution=resolution, iterations=iterations)
def zeros_in(f, x0, x1, samples=100, iterations=100):
dt = (x1 - x0) / samples
f1 = f(x0)
t1 = x0
ranges = []
for _ in range(samples):
t1 += dt
f0 = f1
f1 = f(t1)
if f0 * f1 < 0:
ranges.append((t1 - dt, t1))
for t0, t1 in ranges:
f0 = f(t0)
f1 = f(t1)
for _ in range(iterations):
midpoint = (t1 + t0) * 0.5
fm = f(midpoint)
if f0 * fm < 0:
t1 = midpoint
f1 = fm
else:
t0 = midpoint
f0 = fm
yield (t1 + t0) * 0.5
def cubic_approximate(f, df, x0, x1, samples, periodic=False):
    """Build a cubic Hermite table approximation of f on [x0, x1].

    :param f: function to approximate
    :param df: derivative of f
    :param samples: number of sample points
    :param periodic: treat f as periodic over [x0, x1]
    :returns: a callable evaluating the CubicTable approximation

    Fix: `linrange` was referenced in the periodic branch but never
    imported (module imports only epsilon and linspace), so periodic=True
    always raised NameError.
    """
    if periodic:
        # Local import of the missing name; assumes frostsynth exposes
        # linrange alongside linspace — confirm.
        from frostsynth import linrange
        xs = linrange(x0, x1, samples)
        dscale = (x1 - x0) / samples
    else:
        xs = linspace(x0, x1, samples)
        dscale = (x1 - x0) / (samples - 1)
    values = [f(x) for x in xs]
    # Derivatives are pre-scaled to table-index units.
    derivatives = [dscale * df(x) for x in xs]
    ct = CubicTable(zip(values, derivatives), periodic=periodic)
    if periodic:
        scale = len(ct) / (x1 - x0)
    else:
        scale = (len(ct) - 2) / (x1 - x0)
    return lambda x: ct((x - x0) * scale)
| {
"repo_name": "frostburn/frostsynth",
"path": "frostsynth/numeric.py",
"copies": "1",
"size": "2132",
"license": "mit",
"hash": -2711941201260535000,
"line_mean": 25.9873417722,
"line_max": 123,
"alpha_frac": 0.4788930582,
"autogenerated": false,
"ratio": 3.0632183908045976,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40421114490045973,
"avg_score": null,
"num_lines": null
} |
from frostsynth import *
from frostsynth.interpolation import *
def resampler0_gen(source, ratio):
    """Dynamic resampling of 'source' by variable ratio. Constant interpolation."""
    ratio = to_iterable(ratio)
    current = next(source)
    position = 0.0
    consumed = 1
    while True:
        yield current
        position += next(ratio)
        # Skip source samples until we reach the fractional read position.
        while consumed <= position:
            current = next(source)
            consumed += 1
def resampler1_gen(source, ratio):
    """Dynamic resampling of 'source' by variable ratio. First order interpolation."""
    # Pad with one trailing zero so interpolation can look one sample ahead.
    source = chain(source, [0.0])
    ratio = to_iterable(ratio)
    position = 0.0
    left = next(source)
    right = next(source)
    consumed = 1
    while True:
        # Linear interpolation between the two samples straddling `position`.
        yield left + (right - left) * (position + 1 - consumed)
        position += next(ratio)
        while consumed <= position:
            left = right
            right = next(source)
            consumed += 1
def resampler3_gen(source, ratio):
    """Dynamic resampling of 'source' by variable ratio. Third order Lagrange interpolation."""
    # Pad with two trailing zeros so the 4-point window can run off the end.
    source = chain(source, [0.0, 0.0])
    ratio = to_iterable(ratio)
    index = 0.0
    # Four-sample sliding window around the read position; sample0 starts
    # as an implicit leading zero before the first real sample.
    sample0 = 0.0
    sample1 = next(source)
    sample2 = next(source)
    sample3 = next(source)
    source_index = 1
    while True:
        yield lagrange_four_point(index + 1 - source_index, sample0, sample1, sample2, sample3)
        index += next(ratio)
        # Shift the window until it again straddles the fractional index.
        while source_index <= index:
            sample0 = sample1
            sample1 = sample2
            sample2 = sample3
            sample3 = next(source)
            source_index += 1
def analytic_resample1(f, frequency, sub_samples=1, variability=0, srate=None):
    """Sample f along a phase driven by `frequency`, evaluating f only about
    `sub_samples` times per unit of phase and interpolating linearly between
    evaluation points.

    :param f: function of phase to sample
    :param frequency: iterable or scalar phase advance rate
    :param sub_samples: f evaluations per unit of phase
    :param variability: randomizes the spacing of evaluation points (0 = uniform)
    :param srate: sample rate; defaults to the global rate via get_srate()
    """
    dt = 1 / get_srate(srate)
    ratio = 1 / sub_samples
    frequency = to_iterable(frequency)
    y1 = f(0)
    target = 0
    phase = target
    for sample in frequency:
        if phase >= target:
            # Phase passed the next evaluation point: evaluate f there and
            # start a new linear segment with a (possibly jittered) length.
            y0 = y1
            y1 = f(phase)
            prev_target = target
            d_target = (1.0 + (random() - 0.5) * variability) * ratio
            target += d_target
            dp = (y1 - y0) / d_target  # slope of the current segment
        yield y0 + dp * (phase - prev_target)
        phase += sample * dt
| {
"repo_name": "frostburn/frostsynth",
"path": "frostsynth/resample.py",
"copies": "1",
"size": "2229",
"license": "mit",
"hash": 5971018846677080000,
"line_mean": 27.9480519481,
"line_max": 95,
"alpha_frac": 0.579632122,
"autogenerated": false,
"ratio": 3.70265780730897,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.977941769891211,
"avg_score": 0.0005744460793721877,
"num_lines": 77
} |
from frozendict import frozendict
from os import environ
import googlemaps
from pyrules2.route import Place, Route
__author__ = 'nhc'
# Read Google Maps API key from environment, and create a client object if the key was there
try:
    key = environ['GOOGLE_MAPS_API_KEY']
    _client_object = googlemaps.Client(key=key)

    def _client_():
        # Key present: always return the shared client instance.
        return _client_object
except KeyError:
    def _client_():
        # Key absent: defer the failure until Maps access is actually attempted.
        raise Exception('''To use Google Maps with pyrules, put your
Google Maps API key in the environment variable "GOOGLE_MAPS_API_KEY"''')
class Driving(object):
    """Namespace for building driving-mode Routes."""

    @staticmethod
    def route(*places):
        """
        :param places: A sequence of Places (non-Place values are wrapped
            in Place), e.g. (place('New York'), place('Chicago'),
            place('Los Angeles'), place('New York'))
        :return: An immutable Route visiting the given places in sequence,
            e.g. New York -> Chicago -> Los Angeles -> New York.
            All distances and durations will be based on driving.
        """
        normalized = tuple(stop if isinstance(stop, Place) else Place(stop) for stop in places)
        costs = frozendict(_google_maps_leg_costs('driving', normalized))
        return Route(places=normalized, leg_costs=costs)
def _google_maps_leg_costs(mode, places):
    """
    Looks up distances and durations on Google Maps.
    :param mode: A Google Maps mode, e.g. 'driving'.
    :param places: An iterable of Places.
    :return: A dict mapping each of 'duration' and 'distance' to
    a frozendict mapping Place pairs to relevant values.
    """
    for waypoint in places:
        assert isinstance(waypoint, Place)
    distance = dict()
    duration = dict()
    # Call Google Maps API
    # One distance-matrix request covers every (origin, destination) pair.
    response = _client_().distance_matrix(origins=[p.address for p in places],
                                          destinations=[p.address for p in places],
                                          mode=mode,
                                          units='metric')
    # Verify and parse response
    assert response['status'] == 'OK'
    rows = response['rows']
    assert len(rows) == len(places)
    # Populate the dicts distance and duration
    for row, origin in zip(rows, places):
        row_elements = row['elements']  # There's also data about exact addresses used
        assert len(row_elements) == len(places)
        for element, destination in zip(row_elements, places):
            assert element['status'] == 'OK'
            # 'value' is the raw numeric field (presumably seconds/meters per
            # the Distance Matrix API, vs. the localized 'text' — confirm).
            duration[(origin, destination)] = element['duration']['value']
            distance[(origin, destination)] = element['distance']['value']
    # Construct and return the dict
    return {'distance': frozendict(distance), 'duration': frozendict(duration)}
| {
"repo_name": "mr-niels-christensen/pyrules",
"path": "src/pyrules2/route_gmaps.py",
"copies": "1",
"size": "2719",
"license": "mit",
"hash": -1640888612458466000,
"line_mean": 39.5820895522,
"line_max": 92,
"alpha_frac": 0.6303788157,
"autogenerated": false,
"ratio": 4.151145038167939,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5281523853867939,
"avg_score": null,
"num_lines": null
} |
from frozendict import frozendict
import re
# Keyword-argument templates describing a VM; the string values are
# placeholders ('test') — presumably defaults for monitoring templates,
# confirm against the concrete backends.
_VM_KWARGS = (
    ('ostype', 1),
    ('ostype_text', 'test'),
    ('dc_name', 'test'),
    ('disk_image', 'test'),
    ('disk_image_abbr', 'test'),
)
VM_KWARGS = frozendict(_VM_KWARGS)
VM_KWARGS_KEYS = tuple(VM_KWARGS.keys())
# Variants extended with NIC and disk identifiers.
VM_KWARGS_NIC = frozendict(_VM_KWARGS + (('net', 1), ('nic_id', 2)))
VM_KWARGS_DISK = frozendict(_VM_KWARGS + (('disk', 1), ('disk_id', 2)))
# Nodes carry no extra keyword arguments.
NODE_KWARGS = frozendict()
NODE_KWARGS_KEYS = tuple(NODE_KWARGS.keys())
class FakeDetailLog(object):
    """
    No-op stand-in for a detail log: accepts and discards log lines.
    """
    def add(self, *args):
        # Intentionally ignore everything.
        pass


# Shared no-op instance, used as the default `task_log` argument.
LOG = FakeDetailLog()
class MonitoringError(Exception):
    """
    Root of the monitoring exception hierarchy; every monitoring-specific
    exception must subclass this.
    """
    pass
class AbstractMonitoringBackend(object):
    """
    Base Monitoring class. Other monitoring backends must inherit from this class.

    Every operation raises NotImplementedError here; concrete backends
    override the subset they support.
    """
    # Alert severity levels (0-5, ascending).
    NOT_CLASSIFIED = 0
    INFORMATION = 1
    WARNING = 2
    AVERAGE = 3
    HIGH = 4
    DISASTER = 5

    # Accepted characters for a hostgroup specification string.
    RE_MONITORING_HOSTGROUPS = re.compile(r'^[\w\s.\-,\"{\}]+$')

    # Concrete backends must set their server class.
    server_class = NotImplemented

    def __init__(self, dc, **kwargs):
        # dc: the datacenter this backend instance is scoped to.
        self.dc = dc

    def __hash__(self):
        raise NotImplementedError

    def enabled(self):
        raise NotImplementedError

    @property
    def connected(self):
        raise NotImplementedError

    def reset_cache(self):
        raise NotImplementedError

    # --- Task log helpers --------------------------------------------------
    def task_log_success(self, task_id, **kwargs):
        raise NotImplementedError

    def task_log_error(self, task_id, **kwargs):
        raise NotImplementedError

    # --- Alerts ------------------------------------------------------------
    @classmethod
    def vm_send_alert(cls, vm, msg, **kwargs):
        raise NotImplementedError

    @classmethod
    def node_send_alert(cls, node, msg, **kwargs):
        raise NotImplementedError

    # --- VM monitoring -----------------------------------------------------
    def vm_sla(self, vm_node_history):
        raise NotImplementedError

    def vm_history(self, vm_host_id, items, zhistory, since, until, **kwargs):
        raise NotImplementedError

    def is_vm_host_created(self, vm):
        raise NotImplementedError

    def vm_sync(self, vm, force_update=False, task_log=LOG):
        raise NotImplementedError

    def vm_disable(self, vm, task_log=LOG):
        raise NotImplementedError

    def vm_delete(self, vm, internal=True, external=True, task_log=LOG):
        raise NotImplementedError

    # --- Node monitoring ---------------------------------------------------
    def node_sla(self, node_hostname, since, until):
        raise NotImplementedError

    def node_sync(self, node, task_log=LOG):
        raise NotImplementedError

    def node_status_sync(self, node, task_log=LOG):
        raise NotImplementedError

    def node_delete(self, node, task_log=LOG):
        raise NotImplementedError

    def node_history(self, node_id, items, zhistory, since, until, items_search=None):
        raise NotImplementedError

    # --- Templates and hostgroups ------------------------------------------
    def template_list(self, full=False, extended=False):
        raise NotImplementedError

    def hostgroup_list(self):
        raise NotImplementedError

    def hostgroup_detail(self, name):
        raise NotImplementedError

    def hostgroup_create(self, name):
        raise NotImplementedError

    def hostgroup_delete(self, name):
        raise NotImplementedError

    # --- Alerts, users and groups ------------------------------------------
    def alert_list(self):
        raise NotImplementedError

    def user_group_sync(self, group=None, dc_as_group=None):
        raise NotImplementedError

    def user_group_delete(self, name):
        raise NotImplementedError

    def user_sync(self, user):
        raise NotImplementedError

    def user_delete(self, name):
        raise NotImplementedError

    # --- Actions -----------------------------------------------------------
    def action_list(self):
        raise NotImplementedError

    def action_detail(self, name):
        raise NotImplementedError

    def action_update(self, name, data):
        raise NotImplementedError

    def action_create(self, name, data):
        raise NotImplementedError

    def action_delete(self, name):
        raise NotImplementedError
| {
"repo_name": "erigones/esdc-ce",
"path": "api/mon/backends/abstract/__init__.py",
"copies": "1",
"size": "3935",
"license": "apache-2.0",
"hash": 2493233355199858700,
"line_mean": 23.748427673,
"line_max": 88,
"alpha_frac": 0.6531130877,
"autogenerated": false,
"ratio": 4.060887512899897,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00021873207987941715,
"num_lines": 159
} |
from frozen_dict import FrozenDict
def freeze(obj):
    """Recursively convert *obj* into a hashable equivalent.

    Mappings become FrozenDict objects, sequences become tuples, and sets
    (and other non-indexable iterables) become frozensets.  Can also be used
    to turn JSON data into a hashable value.

    Raises:
        TypeError: if *obj* is unhashable and is neither a mapping nor an
            iterable.
    """
    try:
        # Already hashable (str, int, tuple of hashables, ...) — return as-is.
        hash(obj)
        return obj
    except TypeError:
        pass
    # Duck-type mappings by their interface instead of probing with an
    # ``obj[tuple(obj)]`` lookup: the old probe misclassified defaultdicts
    # (the lookup succeeds — and even inserts a spurious key — so the
    # object fell through to the sequence/set handling below).
    if hasattr(obj, 'keys') and hasattr(obj, '__getitem__'):
        frz = {k: freeze(obj[k]) for k in obj}
        return FrozenDict(frz)
    # Sequence-like objects (support integer indexing) become tuples;
    # everything else iterable becomes a frozenset.
    try:
        obj[0]
        cls = tuple
    except TypeError:
        cls = frozenset
    except IndexError:
        # Empty sequence: indexing raised IndexError, but it is still ordered.
        cls = tuple
    try:
        itr = iter(obj)
    except TypeError:
        itr = None
    if itr is not None:
        return cls(freeze(i) for i in itr)
    msg = 'Unsupported type: %r' % type(obj).__name__
    raise TypeError(msg)
| {
"repo_name": "zelaznik/frozen_dict",
"path": "freeze_recursive.py",
"copies": "1",
"size": "1219",
"license": "mit",
"hash": 6448178210015224000,
"line_mean": 22,
"line_max": 66,
"alpha_frac": 0.5537325677,
"autogenerated": false,
"ratio": 4.2622377622377625,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01034563019788295,
"num_lines": 53
} |
from frozendict import frozendict
from api.decorators import lock
from api.mon.messages import MON_OBJ_HOSTGROUP
from api.mon.backends.zabbix.exceptions import (RemoteObjectDoesNotExist, RemoteObjectAlreadyExists,
RemoteObjectManipulationError)
from api.mon.backends.zabbix.containers.base import ZabbixBaseContainer
class ZabbixHostGroupContainer(ZabbixBaseContainer):
    """
    Container class for the Zabbix HostGroup object.

    There are two types of hostgroups:
    - dc-bound (local): with a DC prefix; visible and usable from a specific DC
    - dc-unbound (global): without a DC prefix; visible from everywhere and by everyone; editable only by SuperAdmin
    """
    ZABBIX_ID_ATTR = 'groupid'
    # Maximum length of the fully qualified (DC-prefixed) hostgroup name.
    NAME_MAX_LENGTH = 64
    # zapi query parameter templates; the extended variant also fetches the host count.
    QUERY_BASE = frozendict({'output': ['name', 'groupid']})
    QUERY_EXTENDED = frozendict({'output': ['name', 'groupid'], 'selectHosts': 'count'})

    def __init__(self, name, dc_bound=False, **kwargs):
        self.new = False  # Used by actions
        self.dc_bound = dc_bound  # True for local (DC-prefixed) hostgroups
        super(ZabbixHostGroupContainer, self).__init__(name, **kwargs)

    @classmethod
    def hostgroup_name_factory(cls, dc_name, hostgroup_name):
        """Return the Zabbix-side name: DC-qualified for local groups, plain for global ones."""
        if dc_name:  # local hostgroup
            name = cls.trans_dc_qualified_name(hostgroup_name, dc_name)
            if len(name) > cls.NAME_MAX_LENGTH:
                # NOTE(review): the message says "less than 61 chars" while the
                # limit constant above is 64 — confirm which figure is intended.
                raise ValueError('dc_name + group name should have less than 61 chars, '
                                 'but they have %d instead: %s %s' % (len(name), dc_name, hostgroup_name))
        else:
            name = hostgroup_name  # global hostgroup
        return name

    @classmethod
    def from_mgmt_data(cls, zapi, name, **kwargs):
        """Alternate constructor: build from mgmt-side data without contacting Zabbix."""
        return cls(name, zapi=zapi, **kwargs)

    @classmethod
    def from_zabbix_name(cls, zapi, name, **kwargs):
        """Alternate constructor: build from a Zabbix name and immediately fetch remote state."""
        container = cls(name, zapi=zapi, **kwargs)
        container.refresh()
        return container

    @classmethod
    def from_zabbix_data(cls, zapi, zabbix_object, **kwargs):
        """Alternate constructor: wrap an already-fetched Zabbix API object."""
        return cls(zabbix_object['name'], zapi=zapi, zabbix_object=zabbix_object, **kwargs)

    @classmethod
    def from_zabbix_ids(cls, zapi, zabbix_ids):
        """Fetch and wrap all hostgroups matching the given Zabbix group IDs."""
        params = dict({'groupids': zabbix_ids}, **cls.QUERY_EXTENDED)
        response = cls.call_zapi(zapi, 'hostgroup.get', params=params)
        return [cls.from_zabbix_data(zapi, item) for item in response]

    @classmethod
    def _is_visible_from_dc(cls, zabbix_object, dc_name, allow_global=False):
        """Return True if the raw hostgroup belongs to *dc_name* (or is global and allowed)."""
        match = cls.RE_NAME_WITH_DC_PREFIX.match(zabbix_object['name'])
        if match:
            # RE_NAME_WITH_DC_PREFIX results in exactly two (named) groups: dc name and hostgroup name:
            return match.group('dc_name') == dc_name
        else:
            return allow_global

    @classmethod
    def all(cls, zapi, dc_name, include_global=False, count_hosts=False, **kwargs):
        """List hostgroups, optionally restricted to *dc_name* and with host counts."""
        if count_hosts:
            query = cls.QUERY_EXTENDED
        else:
            query = cls.QUERY_BASE
        response = cls.call_zapi(zapi, 'hostgroup.get', params=dict(query))
        if dc_name is not None:
            # Filter lazily; the list comprehension below materializes the result.
            response = (hostgroup for hostgroup in response if cls._is_visible_from_dc(hostgroup, dc_name,
                                                                                       allow_global=include_global))
        return [cls.from_zabbix_data(zapi, item, **kwargs) for item in response]

    @classmethod
    def _clear_hostgroup_list_cache(cls, name):
        """Invalidate the cached mon_hostgroup_list views for the group's DC (no-op for global names)."""
        from api.mon.base.api_views import MonHostgroupView
        # Invalidate cache for mon_hostgroup_list only if we have dc_name
        match = cls.RE_NAME_WITH_DC_PREFIX.match(name)
        if match:
            dc_name = match.group('dc_name')
            # Clear every (dc_bound, full) cache variant.
            for dc_bound in (True, False):
                for full in (True, False):
                    MonHostgroupView.clear_cache(dc_name, dc_bound, full=full)

    def refresh(self):
        """Re-fetch this hostgroup from Zabbix by name and re-initialize local state."""
        params = dict(filter={'name': self.name}, **self.QUERY_EXTENDED)
        self._api_response = self._call_zapi('hostgroup.get', params=params, mon_object=MON_OBJ_HOSTGROUP,
                                             mon_object_name=self.name_without_dc_prefix)
        zabbix_object = self.parse_zabbix_get_result(self._api_response, mon_object=MON_OBJ_HOSTGROUP,
                                                     mon_object_name=self.name_without_dc_prefix)
        self.init(zabbix_object)

    def _update_related_user_groups(self):
        """Grant read rights on this hostgroup to all user groups of the same DC (local groups only)."""
        from api.mon.backends.zabbix.containers.user_group import ZabbixUserGroupContainer
        match = self.RE_NAME_WITH_DC_PREFIX.match(self.name)
        if match:
            dc_name = match.group('dc_name')
            for related_user_group in ZabbixUserGroupContainer.all(self._zapi, dc_name, resolve_users=False):
                related_user_group.add_hostgroup_right(self.zabbix_id)

    def create(self):
        """Create the hostgroup in Zabbix, invalidate caches and wire up DC user groups.

        Returns self.CREATED on success; raises on remote failure.
        """
        params = {'name': self.name}
        self._api_response = self._call_zapi('hostgroup.create', params=params, mon_object=MON_OBJ_HOSTGROUP,
                                             mon_object_name=self.name_without_dc_prefix)
        self.zabbix_id = int(self.parse_zabbix_create_result(self._api_response, 'groupids',
                                                             mon_object=MON_OBJ_HOSTGROUP,
                                                             mon_object_name=self.name_without_dc_prefix))
        self.zabbix_object = params
        # Invalidate cache for mon_hostgroup_list
        self._clear_hostgroup_list_cache(self.name)
        # Add our new hostgroup to all user groups related to our DC prefix
        self._update_related_user_groups()
        self.new = True
        return self.CREATED

    @classmethod
    @lock(key_args=(1,), wait_for_release=True, bound=True)
    def create_from_name(cls, zapi, name):
        """Create a hostgroup by name under a lock; fall back to refresh if it already exists."""
        container = cls(name, zapi=zapi)
        try:
            container.create()
        except RemoteObjectAlreadyExists:
            # Lost the race — somebody else created it; load the remote state instead.
            container.refresh()
        return container

    @classmethod
    def get_or_create(cls, zapi, name):
        """Fetch the hostgroup by name, creating it if it does not exist yet."""
        try:
            return ZabbixHostGroupContainer.from_zabbix_name(zapi, name)
        except RemoteObjectDoesNotExist:
            return ZabbixHostGroupContainer.create_from_name(zapi, name)

    def delete(self):
        """Delete the (empty) hostgroup from Zabbix and invalidate caches.

        Raises RemoteObjectManipulationError if the group still contains hosts.
        """
        assert self.zabbix_id, 'Cannot delete Hostgroup without groupid'
        if int(self.zabbix_object.get('hosts', 0)):
            raise RemoteObjectManipulationError(detail='{mon_object} is not empty',
                                                mon_object=MON_OBJ_HOSTGROUP, name=self.name_without_dc_prefix)
        self._api_response = self._call_zapi('hostgroup.delete', params=[self.zabbix_id],
                                             mon_object=MON_OBJ_HOSTGROUP,
                                             mon_object_name=self.name_without_dc_prefix)
        assert self.zabbix_id == int(self.parse_zabbix_delete_result(self._api_response, 'groupids'))
        self.reset()
        # Invalidate cache for mon_hostgroup_list
        self._clear_hostgroup_list_cache(self.name)
        return self.DELETED

    @property
    def name_without_dc_prefix(self):
        """User-facing group name with the DC prefix stripped."""
        return self.trans_dc_qualified_name(self.name, from_zabbix=True)

    @property
    def as_mgmt_data(self):
        """Serialize this hostgroup into the dict format used by the mgmt API."""
        match = self.RE_NAME_WITH_DC_PREFIX.match(self.name)
        hosts = int(self.zabbix_object.get('hosts', 0))
        if match:
            name = match.group('name')
        else:
            name = self.name
        # Hide host count for non-local hostgroup
        # NOTE(review): code hides the count when dc_bound is True, but the class
        # docstring calls dc_bound groups "local" — comment/code mismatch; confirm intent.
        if self.dc_bound:
            hosts = None
        return {
            'id': self.zabbix_id,
            'name': name,
            'hosts': hosts,
            'dc_bound': bool(match),
        }
| {
"repo_name": "erigones/esdc-ce",
"path": "api/mon/backends/zabbix/containers/host_group.py",
"copies": "1",
"size": "7823",
"license": "apache-2.0",
"hash": 4155683692752875000,
"line_mean": 39.324742268,
"line_max": 120,
"alpha_frac": 0.5977246581,
"autogenerated": false,
"ratio": 3.9075924075924076,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5005317065692407,
"avg_score": null,
"num_lines": null
} |
from frozendict import frozendict
from api import serializers as s
from vms.models import Node
class NodeSerializer(s.Serializer):
    """
    Node details serializer (read-only).
    """
    hostname = s.Field()
    address = s.Field()
    # Raw integer status plus its human-readable display counterpart (same source).
    status = s.IntegerChoiceField(choices=Node.STATUS_DB, read_only=True)
    node_status = s.DisplayChoiceField(source='status', choices=Node.STATUS_DB, read_only=True)
    owner = s.SlugRelatedField(slug_field='username', read_only=True)
    is_head = s.BooleanField(read_only=True)
    # Totals exposed under shorter public names (cpu_total -> cpu, ram_total -> ram).
    cpu = s.IntegerField(source='cpu_total', read_only=True)
    ram = s.IntegerField(source='ram_total', read_only=True)
    cpu_free = s.IntegerField(read_only=True)
    ram_free = s.IntegerField(read_only=True)
    ram_kvm_overhead = s.IntegerField(read_only=True)
class ExtendedNodeSerializer(NodeSerializer):
    """
    Extended node details serializer (read-only).

    Adds per-node counters (VMs, snapshots, backups) computed by the raw SQL
    subqueries in ``extra_select`` — presumably consumed by the queryset
    builder via QuerySet.extra(); confirm against the view code.
    """
    extra_select = frozendict({
        'vms': '''SELECT COUNT(*) FROM "vms_vm" WHERE "vms_node"."uuid" = "vms_vm"."node_id"''',
        # real_vms excludes slave (replicated) VMs via the LEFT JOIN + IS NULL filter.
        'real_vms': '''SELECT COUNT(*) FROM "vms_vm" LEFT OUTER JOIN "vms_slavevm" ON
 ( "vms_vm"."uuid" = "vms_slavevm"."vm_id" ) WHERE "vms_node"."uuid" = "vms_vm"."node_id" AND
 "vms_slavevm"."id" IS NULL''',
        'snapshots': '''SELECT COUNT(*) FROM "vms_snapshot" LEFT OUTER JOIN "vms_vm" ON
 ( "vms_vm"."uuid" = "vms_snapshot"."vm_id" ) WHERE "vms_node"."uuid" = "vms_vm"."node_id"''',
        'backups': '''SELECT COUNT(*) FROM "vms_backup" WHERE "vms_node"."uuid" = "vms_backup"."node_id"'''
    })
    dcs = s.DcsField()
    vms = s.IntegerField(read_only=True)
    snapshots = s.IntegerField(read_only=True)
    backups = s.IntegerField(read_only=True)
    real_vms = s.IntegerField(read_only=True)
| {
"repo_name": "erigones/esdc-ce",
"path": "api/node/base/serializers.py",
"copies": "1",
"size": "1758",
"license": "apache-2.0",
"hash": -6024319982136754000,
"line_mean": 40.8571428571,
"line_max": 107,
"alpha_frac": 0.649032992,
"autogenerated": false,
"ratio": 3.279850746268657,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44288837382686563,
"avg_score": null,
"num_lines": null
} |
from frozendict import frozendict
from api import serializers as s
from vms.models import Vm, Node
class VmCreateSerializer(s.Serializer):
    """Input flags for the VM create action (both default to False)."""
    recreate = s.BooleanField(default=False)
    force = s.BooleanField(default=False)
class VmBaseSerializer(s.Serializer):
    """Save request and display node color for non-admin users"""

    def __init__(self, request, *args, **kwargs):
        # Keep the request so the data property can check the user's admin status.
        self.request = request
        super(VmBaseSerializer, self).__init__(*args, **kwargs)

    @property
    def data(self):
        # Lazily build and cache the serialized output in self._data.
        if self._data is None:
            data = super(VmBaseSerializer, self).data
            # Normalize to parallel lists so single- and many-object
            # serialization share one post-processing loop.
            if self.many:
                data_list = data
                obj_list = self.object
            else:
                data_list = [data]
                obj_list = [self.object]
            for i, vm in enumerate(data_list):
                # Display node color instead of name
                if 'node' in vm and vm['node'] and obj_list[i].node:
                    if not self.request.user.is_admin(self.request):
                        vm['node'] = obj_list[i].node.color
                # Add changed attribute
                vm['changed'] = obj_list[i].json_changed()
            self._data = data
        return self._data
class VmSerializer(VmBaseSerializer):
    """
    VM details (read-only)
    """
    hostname = s.Field()
    uuid = s.CharField(read_only=True)
    alias = s.Field()
    node = s.SlugRelatedField(slug_field='hostname', read_only=True, required=False)
    owner = s.SlugRelatedField(slug_field='username', read_only=True)
    status = s.DisplayChoiceField(choices=Vm.STATUS, read_only=True)
    # Status of the compute node the VM runs on (follows the related node).
    node_status = s.DisplayChoiceField(source='node.status', choices=Node.STATUS_DB, read_only=True)
    vcpus = s.IntegerField(read_only=True)
    ram = s.IntegerField(read_only=True)
    disk = s.IntegerField(read_only=True)
    ips = s.ArrayField(read_only=True)
    uptime = s.IntegerField(source='uptime_actual', read_only=True)
    locked = s.BooleanField(read_only=True)
class ExtendedVmSerializer(VmSerializer):
    """
    Extended VM details (read-only)

    Adds per-VM counters (snapshots, backups, active/inactive define counts,
    slave VMs) computed by the raw SQL subqueries in ``extra_select``.
    """
    extra_select = frozendict({
        'snapshots': '''SELECT COUNT(*) FROM "vms_snapshot" WHERE "vms_vm"."uuid" = "vms_snapshot"."vm_id"''',
        'backups': '''SELECT COUNT(*) FROM "vms_backup" WHERE "vms_vm"."uuid" = "vms_backup"."vm_id"''',
        # Active vs. inactive defines are distinguished by the enabled flag of the
        # associated djcelery periodic task.
        'snapshot_define_active': '''SELECT COUNT(*) FROM "vms_snapshotdefine"
 LEFT OUTER JOIN "djcelery_periodictask" ON ("vms_snapshotdefine"."periodic_task_id" = "djcelery_periodictask"."id")
 WHERE "vms_snapshotdefine"."vm_id" = "vms_vm"."uuid" AND "djcelery_periodictask"."enabled" = True''',
        'snapshot_define_inactive': '''SELECT COUNT(*) FROM "vms_snapshotdefine"
 LEFT OUTER JOIN "djcelery_periodictask" ON ("vms_snapshotdefine"."periodic_task_id" = "djcelery_periodictask"."id")
 WHERE "vms_snapshotdefine"."vm_id" = "vms_vm"."uuid" AND "djcelery_periodictask"."enabled" = False''',
        'backup_define_active': '''SELECT COUNT(*) FROM "vms_backupdefine"
 LEFT OUTER JOIN "djcelery_periodictask" ON ("vms_backupdefine"."periodic_task_id" = "djcelery_periodictask"."id")
 WHERE "vms_backupdefine"."vm_id" = "vms_vm"."uuid" AND "djcelery_periodictask"."enabled" = True''',
        'backup_define_inactive': '''SELECT COUNT(*) FROM "vms_backupdefine"
 LEFT OUTER JOIN "djcelery_periodictask" ON ("vms_backupdefine"."periodic_task_id" = "djcelery_periodictask"."id")
 WHERE "vms_backupdefine"."vm_id" = "vms_vm"."uuid" AND "djcelery_periodictask"."enabled" = False''',
        'slaves': '''SELECT COUNT(*) FROM "vms_slavevm" WHERE "vms_vm"."uuid" = "vms_slavevm"."master_vm_id"''',
    })
    tags = s.TagField(required=False, default=[])
    snapshot_define_inactive = s.IntegerField(read_only=True)
    snapshot_define_active = s.IntegerField(read_only=True)
    snapshots = s.IntegerField(read_only=True)
    backup_define_inactive = s.IntegerField(read_only=True)
    backup_define_active = s.IntegerField(read_only=True)
    backups = s.IntegerField(read_only=True)
    slaves = s.IntegerField(read_only=True)
    size_snapshots = s.IntegerField(read_only=True)
    size_backups = s.IntegerField(read_only=True)
| {
"repo_name": "erigones/esdc-ce",
"path": "api/vm/base/serializers.py",
"copies": "1",
"size": "4230",
"license": "apache-2.0",
"hash": 5872266532632285000,
"line_mean": 41.3,
"line_max": 119,
"alpha_frac": 0.6430260047,
"autogenerated": false,
"ratio": 3.566610455311973,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9702990033959296,
"avg_score": 0.0013292852105355895,
"num_lines": 100
} |
from frozendict import frozendict
from que.tasks import get_task_logger
from api.mon.backends.zabbix.exceptions import (RemoteObjectManipulationError, RemoteObjectDoesNotExist,
RemoteObjectAlreadyExists)
from api.mon.backends.zabbix.containers.base import ZabbixBaseContainer
task_logger = get_task_logger(__name__)
class ZabbixUserGroupContainer(ZabbixBaseContainer):
    """
    Container class for the Zabbix UserGroup object.

    Mirrors mgmt-side user groups into Zabbix, keeping group membership and
    hostgroup permissions in sync. Methods that change membership track the
    touched users in ``self.affected_users`` keyed by the action constants.
    """
    ZABBIX_ID_ATTR = 'usrgrpid'
    # Zabbix gui_access values (API string constants).
    FRONTEND_ACCESS_ENABLED_WITH_DEFAULT_AUTH = '0'
    FRONTEND_ACCESS_DISABLED = '2'
    USERS_STATUS_ENABLED = 0
    # Zabbix hostgroup permission levels.
    PERMISSION_DENY = 0
    PERMISSION_READ_ONLY = 2
    PERMISSION_READ_WRITE = 3
    # zapi query parameter templates; the base variant also resolves member users.
    QUERY_BASE = frozendict({'selectUsers': ['alias'], 'selectRights': 'extend'})
    QUERY_WITHOUT_USERS = frozendict({'selectRights': 'extend'})
    OWNERS_GROUP = '#owner'
    NAME_MAX_LENGTH = 64
    # Immutable template for per-instance affected_users accounting
    # (action constant -> set of affected user names).
    AFFECTED_USERS = frozendict({
        ZabbixBaseContainer.NOTHING: frozenset(),
        ZabbixBaseContainer.CREATED: frozenset(),
        ZabbixBaseContainer.UPDATED: frozenset(),
        ZabbixBaseContainer.DELETED: frozenset(),
    })

    # noinspection PyUnresolvedReferences
    def __init__(self, *args, **kwargs):
        super(ZabbixUserGroupContainer, self).__init__(*args, **kwargs)
        self.users = set()  # type: [ZabbixUserContainer]  # noqa: F821
        self.hostgroup_ids = set()  # type: [int]
        self.superuser_group = False
        # Mutable per-instance copy of AFFECTED_USERS (frozendict of mutable sets).
        self.affected_users = frozendict({key: set() for key in self.AFFECTED_USERS})

    @classmethod
    def user_group_name_factory(cls, dc_name, local_group_name):
        """
        We have to qualify the dc name to prevent name clashing among groups in different datacenters,
        but in the same zabbix.
        """
        name = cls.trans_dc_qualified_name(local_group_name, dc_name)
        if len(name) > cls.NAME_MAX_LENGTH:
            # NOTE(review): message says "less than 61 chars" while NAME_MAX_LENGTH
            # is 64 — confirm which figure is intended.
            raise ValueError('dc_name + group name should have less than 61 chars, '
                             'but they have %d instead: %s %s' % (len(name), dc_name, local_group_name))
        return name

    @classmethod
    def from_zabbix_data(cls, zapi, zabbix_object):
        """Alternate constructor: wrap an already-fetched Zabbix API object."""
        from api.mon.backends.zabbix.containers.user import ZabbixUserContainer
        container = cls(zabbix_object['name'], zapi=zapi, zabbix_object=zabbix_object)
        # container.superuser_group = FIXME cannot determine from this data
        container.users = {ZabbixUserContainer.from_zabbix_data(zapi, userdata)
                           for userdata in zabbix_object.get('users', [])}
        container.hostgroup_ids = {int(right['id']) for right in zabbix_object.get('rights', [])}
        return container

    @classmethod
    def from_zabbix_ids(cls, zapi, zabbix_ids, resolve_users=True):
        """Fetch and wrap all user groups matching the given Zabbix IDs."""
        if resolve_users:
            query = cls.QUERY_BASE
        else:
            query = cls.QUERY_WITHOUT_USERS
        params = dict(usrgrpids=zabbix_ids, **query)
        response = cls.call_zapi(zapi, 'usergroup.get', params=params)
        return [cls.from_zabbix_data(zapi, item) for item in response]

    @classmethod
    def from_zabbix_name(cls, zapi, name, resolve_users=True):
        """Fetch and wrap a single user group by its qualified Zabbix name."""
        if resolve_users:
            query = cls.QUERY_BASE
        else:
            query = cls.QUERY_WITHOUT_USERS
        params = dict(filter={'name': name}, **query)
        response = cls.call_zapi(zapi, 'usergroup.get', params=params)
        zabbix_object = cls.parse_zabbix_get_result(response)
        return cls.from_zabbix_data(zapi, zabbix_object)

    @classmethod
    def from_mgmt_data(cls, zapi, dc_name, group_name, users, superusers=False):
        """Build the desired (mgmt-side) state of a user group without contacting Zabbix."""
        from api.mon.backends.zabbix.containers.user import ZabbixUserContainer
        from api.mon.backends.zabbix.containers.host_group import ZabbixHostGroupContainer
        # I should probably get all existing user ids for user names, and hostgroup ids for hostgroup names
        container = cls(group_name, zapi=zapi)
        container.users = {ZabbixUserContainer.from_mgmt_data(zapi, user) for user in users}
        # The group gets rights to every hostgroup of its DC.
        container.hostgroup_ids = {zgc.zabbix_id for zgc in ZabbixHostGroupContainer.all(zapi, dc_name)}
        container.superuser_group = superusers  # FIXME this information is not used anywhere by now
        return container

    @classmethod
    def all(cls, zapi, dc_name, resolve_users=True):
        """List all user groups whose names carry the given DC prefix (wildcard search)."""
        if resolve_users:
            query = cls.QUERY_BASE
        else:
            query = cls.QUERY_WITHOUT_USERS
        params = dict(
            search={'name': cls.TEMPLATE_NAME_WITH_DC_PREFIX.format(dc_name=dc_name, name='*')},
            searchWildcardsEnabled=True,
            **query
        )
        response = cls.call_zapi(zapi, 'usergroup.get', params=params)
        return [cls.from_zabbix_data(zapi, item) for item in response]

    @classmethod
    def synchronize(cls, zapi, dc_name, group_name, users, superusers=False):
        """
        Make sure that in the end, there will be a user group with specified users in zabbix.
        :param group_name: should be the qualified group name (<DC>:<group name>:)
        """
        # TODO synchronization of superadmins should be in the DC settings
        user_group = cls.from_mgmt_data(zapi, dc_name, group_name, users, superusers=superusers)
        try:
            zabbix_user_group = cls.from_zabbix_name(zapi, group_name, resolve_users=True)
        except RemoteObjectDoesNotExist:
            # We create it
            return user_group.create()
        else:
            # Otherwise we update it
            return zabbix_user_group.update_from(user_group)

    @classmethod
    def delete_by_name(cls, zapi, name):
        """Delete the user group by name; a missing group is treated as a no-op."""
        # for optimization: z.zapi.usergroup.get({'search': {'name': ":dc_name:*"}, 'searchWildcardsEnabled': True})
        try:
            group = cls.from_zabbix_name(zapi, name)
        except RemoteObjectDoesNotExist:
            return cls.NOTHING, cls.AFFECTED_USERS
        else:
            return group.delete()

    @classmethod
    def _generate_hostgroup_rights(cls, hostgroup_ids):
        """Build the usergroup.rights payload granting read-only access to the given hostgroups."""
        return [{
            'id': hostgroup_zabbix_id,
            'permission': cls.PERMISSION_READ_ONLY,
        } for hostgroup_zabbix_id in hostgroup_ids]

    def delete(self):
        """Delete the group and any member users for whom this was the last group.

        Returns (DELETED, affected_users).
        """
        task_logger.debug('Going to delete group %s', self.name)
        task_logger.debug('Group.users before: %s', self.users)
        users_to_remove = self.users.copy()  # We have to copy it because group.users will get messed up
        self.remove_users(users_to_remove, delete_users_if_last=True)  # remove all users
        task_logger.debug('Group.users after: %s', self.users)
        self._api_response = self._call_zapi('usergroup.delete', params=[self.zabbix_id])
        self.parse_zabbix_delete_result(self._api_response, 'usrgrpids')
        self.reset()
        return self.DELETED, self.affected_users

    def create(self):
        """Create the group in Zabbix with its hostgroup rights, then push its members.

        Returns (CREATED, affected_users).
        """
        assert not self.zabbix_id, \
            '%s has the zabbix_id already and therefore you should try to update the object, not create it.' % self
        user_group_object = {
            'name': self.name,
            'users_status': self.USERS_STATUS_ENABLED,
            'gui_access': self.FRONTEND_ACCESS_DISABLED,
            'rights': self._generate_hostgroup_rights(self.hostgroup_ids),
        }
        task_logger.debug('Creating usergroup: %s', user_group_object)
        self._api_response = self._call_zapi('usergroup.create', params=user_group_object)
        self.zabbix_id = int(self.parse_zabbix_create_result(self._api_response, 'usrgrpids'))
        # NOTE(review): 'userids' appears unused after this point — possibly leftover; confirm.
        user_group_object['userids'] = []
        self._refetch_users()
        self._push_current_users()
        return self.CREATED, self.affected_users

    def _refresh_users(self):
        """Rebuild self.users from the cached zabbix_object's 'users' list."""
        from api.mon.backends.zabbix.containers.user import ZabbixUserContainer
        self.users = {
            ZabbixUserContainer.from_zabbix_data(self._zapi, userdata)
            for userdata in self.zabbix_object.get('users', [])
        }

    def refresh(self):
        """Re-fetch the group (including members) from Zabbix and re-initialize local state."""
        params = dict(usrgrpids=self.zabbix_id, **self.QUERY_BASE)
        self._api_response = self._call_zapi('usergroup.get', params=params)
        zabbix_object = self.parse_zabbix_get_result(self._api_response)
        self.init(zabbix_object)
        self._refresh_users()

    def update_users(self, user_group):
        """Diff our membership against *user_group* (the desired state) and apply it."""
        task_logger.debug('synchronizing %s', self)
        task_logger.debug('remote_user_group.users %s', self.users)
        task_logger.debug('source_user_group.users %s', user_group.users)
        redundant_users = self.users - user_group.users
        task_logger.debug('redundant_users: %s', redundant_users)
        missing_users = user_group.users - self.users
        task_logger.debug('missing users: %s', missing_users)
        self.remove_users(redundant_users, delete_users_if_last=True)
        self.add_users(missing_users)

    def set_hostgroup_rights(self, new_hostgroup_ids):
        """Replace the group's hostgroup rights with read-only rights to *new_hostgroup_ids*."""
        task_logger.debug('setting usergroup %s hostgroups rights to: %s', self, new_hostgroup_ids)
        params = dict(usrgrpid=self.zabbix_id, rights=self._generate_hostgroup_rights(new_hostgroup_ids))
        self._api_response = self._call_zapi('usergroup.update', params=params)
        self.parse_zabbix_update_result(self._api_response, 'usrgrpids')
        self.hostgroup_ids = new_hostgroup_ids

    def add_hostgroup_right(self, hostgroup_id):
        """Grant read-only access to one more hostgroup (no-op if already granted)."""
        if hostgroup_id not in self.hostgroup_ids:
            # Work on a copy so self.hostgroup_ids only changes after a successful update.
            hostgroup_ids = self.hostgroup_ids.copy()
            hostgroup_ids.add(hostgroup_id)
            self.set_hostgroup_rights(hostgroup_ids)

    def update_from(self, user_group):
        """Bring this (remote) group in line with *user_group* (the desired state)."""
        self.update_users(user_group)
        if self.hostgroup_ids != user_group.hostgroup_ids:
            self.set_hostgroup_rights(user_group.hostgroup_ids)
        return self.UPDATED, self.affected_users

    def _refetch_users(self):
        """Ensure every member user exists in Zabbix and knows about this group."""
        for user in self.users:
            user.renew_zabbix_id()
            user.groups.add(self)
            if not user.zabbix_id:
                try:
                    user.create()
                except RemoteObjectAlreadyExists:
                    # Lost a race — the user appeared meanwhile; just refresh its id.
                    user.renew_zabbix_id()
                else:
                    self.affected_users[self.CREATED].add(user.name)

    def add_users(self, new_users):
        """Add *new_users* to the group and push the full membership to Zabbix."""
        self.users.update(new_users)
        self._refetch_users()
        self._push_current_users()

    def _push_current_users(self):
        """Push the current self.users membership to Zabbix in one update call."""
        self._call_zapi('usergroup.update', params={
            'usrgrpid': self.zabbix_id,
            'userids': [user.zabbix_id for user in self.users]
        })
        self.affected_users[self.UPDATED].update(user.name for user in self.users)

    def remove_user(self, user, delete_user_if_last=False):
        """Remove one user from this group; delete the user entirely when this was its last group.

        Returns UPDATED or DELETED (also recorded in affected_users).
        Raises RemoteObjectManipulationError when removing the last group is not allowed.
        """
        user.refresh()
        if self not in user.groups:
            task_logger.warn('User is not in the group: %s %s (possible race condition)', self, user.groups)
        if not user.groups - {self} and not delete_user_if_last:
            raise RemoteObjectManipulationError('Cannot remove the last group (%s) '
                                                'without deleting the user %s itself!' % (self, user))
        user.groups -= {self}
        if user.groups:
            user.update_group_membership()
            res = self.UPDATED
        else:
            # Zabbix users must belong to at least one group — delete instead.
            user.delete()
            res = self.DELETED
        self.affected_users[res].add(user.name)
        return res

    def remove_users(self, redundant_users, delete_users_if_last):  # TODO move
        """Remove all *redundant_users* from the group, one at a time."""
        self.users -= redundant_users
        # Some zabbix users have to be deleted as this is their last group. We have to go the slower way.
        for user in redundant_users:
            self.remove_user(user, delete_user_if_last=delete_users_if_last)
        # TODO create also a faster way of removal for users that have also different groups

    @property
    def name_without_dc_prefix(self):
        """User-facing group name with the DC prefix stripped."""
        return self.trans_dc_qualified_name(self.name, from_zabbix=True)

    @property
    def as_mgmt_data(self):
        """Serialize this group into the dict format used by the mgmt API."""
        return {'id': self.zabbix_id, 'name': self.name_without_dc_prefix}
| {
"repo_name": "erigones/esdc-ce",
"path": "api/mon/backends/zabbix/containers/user_group.py",
"copies": "1",
"size": "12151",
"license": "apache-2.0",
"hash": 1005228748668497700,
"line_mean": 40.0506756757,
"line_max": 116,
"alpha_frac": 0.6334458069,
"autogenerated": false,
"ratio": 3.754944375772559,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9882388410156528,
"avg_score": 0.0012003545032059036,
"num_lines": 296
} |
from frt_server.common import CommonColumns, Base
from sqlalchemy import Table, Column, Integer, ForeignKey, String
from sqlalchemy.orm import relationship
class Tag(CommonColumns):
    """A free-form tag with display text and a type, linked to other entities
    via the association tables defined below."""
    __tablename__ = 'tag'
    text = Column(String(200))
    type = Column(String(100))
# Many-to-many join tables linking tags to the entities they annotate
# (fonts, families, threads and sample texts).
tag_font_association_table = Table('tag_font_association', Base.metadata,
                                   Column('tag_id', Integer, ForeignKey('tag._id')),
                                   Column('font_id', Integer, ForeignKey('font._id')))
tag_family_association_table = Table('tag_family_association', Base.metadata,
                                     Column('tag_id', Integer, ForeignKey('tag._id')),
                                     Column('family_id', Integer, ForeignKey('family._id')))
tag_thread_association_table = Table('tag_thread_association', Base.metadata,
                                     Column('tag_id', Integer, ForeignKey('tag._id')),
                                     Column('thread_id', Integer, ForeignKey('thread._id')))
tag_sample_text_association_table = Table('tag_sample_text_association', Base.metadata,
                                          Column('tag_id', Integer, ForeignKey('tag._id')),
                                          Column('sample_text_id', Integer, ForeignKey('sample_text._id')))
| {
"repo_name": "HPI-SWA-Lab/BP2016H1",
"path": "frt_server/tag.py",
"copies": "1",
"size": "1055",
"license": "mit",
"hash": -4180132482261220000,
"line_mean": 42.9583333333,
"line_max": 87,
"alpha_frac": 0.7071090047,
"autogenerated": false,
"ratio": 3.7279151943462896,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49350241990462895,
"avg_score": null,
"num_lines": null
} |
from frt_server.tests import TestMinimal
from frt_server.tables import Family, Font
from sqlalchemy.orm import joinedload
import json
import os
class OtfDownloadingTestCase(TestMinimal):
    """Integration test: the /font/<id>/otf endpoint serves the font's OTF file."""

    def setUp(self):
        """Log in, create a family, upload a UFO archive and remember the resulting font id."""
        super().setUp()
        self.login_as('eve@evil.com', 'eveisevil')
        family = Family(family_name='Riblon')
        session = self.connection.session
        session.add(family)
        session.commit()
        session.refresh(family)
        self.family_id = family._id
        # Uploading the UFO archive is expected to create a Font row for the family.
        self.upload_font_file(self.family_id, 'testFiles/RiblonSans/RiblonSans.ufo.zip')
        family = session.query(Family).get(self.family_id)
        self.font_id = family.fonts[0]._id

    def test_download_otf(self):
        """The endpoint must return 200 and the exact bytes of the generated OTF file."""
        session = self.connection.session
        otf_path = session.query(Font).get(self.font_id).otf_file_path()
        with open(otf_path, 'rb') as otf_file:
            otf_contents = otf_file.read()
        response = self.download('/font/{}/otf'.format(self.font_id))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data, otf_contents)
| {
"repo_name": "HPI-SWA-Lab/BP2016H1",
"path": "frt_server/tests/test_otf_downloading.py",
"copies": "1",
"size": "1109",
"license": "mit",
"hash": -6920600139095400000,
"line_mean": 28.1842105263,
"line_max": 88,
"alpha_frac": 0.6546438233,
"autogenerated": false,
"ratio": 3.4123076923076923,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45669515156076923,
"avg_score": null,
"num_lines": null
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.