Dataset schema (column, type, value range):

    commit         string    length 40-40 (commit hash)
    subject        string    length 1-3.25k
    old_file       string    length 4-311
    new_file       string    length 4-311
    old_contents   string    length 0-26.3k
    lang           string    3 classes
    proba          float64   0-1
    diff           string    length 0-7.82k
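The diff column in the records below stores character-level hunks in which whitespace and special characters are percent-encoded (%0A for a newline, %22 for a double quote, %3C/%3E for angle brackets, %5B/%5D for square brackets). A minimal decoding sketch, assuming the values follow standard URL percent-encoding; the function name is illustrative:

from urllib.parse import unquote

def decode_diff(raw_diff: str) -> str:
    """Expand percent-escapes such as %0A and %22 back into literal characters."""
    return unquote(raw_diff)

# Example with a fragment of the first diff below:
print(decode_diff("n.add(user)%0A+ default_notebook = Notebook(title='Default',author_id=user.id)"))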
ded8db519b1534d2f2af362dc9da8c8b2d6b4bf0
Create Default Notebook for All Users
app/auth/views.py
app/auth/views.py
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, current_user
from . import auth
from app import db
from ..models import User
from ..email import send_email
from .forms import *


@auth.before_app_request
def before_request():
    if current_user.is_authenticated() \
            and not current_user.confirmed \
            and request.endpoint[:5] != 'auth.' \
            and request.endpoint != 'static':
        return redirect(url_for('auth.unconfirmed'))


@auth.route('/login', methods=['GET', 'POST'])
def login():
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data.lower()).first()
        if user is not None and user.verify_password(form.password.data):
            login_user(user, form.remember_me.data)
            return redirect(request.args.get('next') or url_for('main.index'))
        flash('Invalid username or password')
    return render_template('auth/login.html', form=form)


@auth.route('/logout')
@login_required
def logout():
    logout_user()
    flash('You have been logged out.')
    return redirect(url_for('main.index'))


@auth.route('/register', methods=['GET', 'POST'])
def register():
    form = RegistrationForm()
    if form.validate_on_submit():
        user = User(email=form.email.data.lower(),
                    username=form.username.data,
                    password=form.password.data)
        db.session.add(user)
        db.session.commit()
        token = user.generate_confirmation_token()
        send_email(user.email, 'Confirm Your Account',
                   'auth/email/confirm', user=user, token=token)
        flash('A confirmation email has been sent.')
        return redirect(url_for('main.index'))
    return render_template('auth/register.html', form=form)


@auth.route('/confirm')
@login_required
def resend_confirmation():
    token = current_user.generate_confirmation_token()
    send_email(current_user.email, 'Confirm Your Account',
               '/auth/email/confirm', user=current_user, token=token)
    flash('A new confirmation email has been sent.')
    return redirect(url_for('main.index'))


@auth.route('/confirm/<token>')
@login_required
def confirm(token):
    if current_user.confirmed:
        return redirect(url_for('main.index'))
    if current_user.confirm(token):
        flash('You have confirmed your account. Thanks!')
    else:
        flash('The confirmation link is invalid or has expired.')
    return redirect(url_for('main.index'))


@auth.route('/unconfirmed')
def unconfirmed():
    if current_user.is_anonymous() or current_user.confirmed:
        return redirect(url_for('main.index'))
    return render_template('auth/unconfirmed.html')


@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
    form = ChangePasswordForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.old_password.data):
            current_user.password = form.password.data
            db.session.add(current_user)
            db.session.commit()
            flash('Your password has been updated.')
            return redirect(url_for('main.index'))
        else:
            flash('Invalid password.')
    return render_template("auth/change_password.html", form=form)


@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
    if not current_user.is_anonymous:
        return redirect(url_for('main.index'))
    form = PasswordResetRequestForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user:
            token = user.generate_reset_token()
            send_email(user.email, 'Reset Your Password',
                       'auth/email/reset_password',
                       user=user, token=token,
                       next=request.args.get('next'))
        flash('An email with instructions to reset your password has been '
              'sent to you.')
        return redirect(url_for('auth.login'))
    return render_template('auth/reset_password.html', form=form)


@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
    if not current_user.is_anonymous:
        return redirect(url_for('main.index'))
    form = PasswordResetForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is None:
            return redirect(url_for('main.index'))
        if user.reset_password(token, form.password.data):
            flash('Your password has been updated.')
            return redirect(url_for('auth.login'))
        else:
            return redirect(url_for('main.index'))
    return render_template('auth/reset_password.html', form=form)


@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
    form = ChangeEmailForm()
    if form.validate_on_submit():
        if current_user.verify_password(form.password.data):
            new_email = form.email.data
            token = current_user.generate_email_change_token(new_email)
            send_email(new_email, 'Confirm your email address',
                       'auth/email/change_email',
                       user=current_user, token=token)
            flash('An email with instructions to confirm your new email '
                  'address has been sent to you.')
            return redirect(url_for('main.index'))
        else:
            flash('Invalid email or password.')
    return render_template("auth/change_email.html", form=form)


@auth.route('/change-email/<token>')
@login_required
def change_email(token):
    if current_user.change_email(token):
        flash('Your email address has been updated.')
    else:
        flash('Invalid request.')
    return redirect(url_for('main.index'))
Python
0
@@ -1476,24 +1476,136 @@ n.add(user)%0A + default_notebook = Notebook(title='Default',author_id=user.id)%0A db.session.add(default_notebook)%0A db.s
58bab9291c85edc3f13d3dc0659eff3c17201eb1
Improve pixelcnn namings and comments
eva/models/pixelcnn.py
eva/models/pixelcnn.py
from keras.models import Model from keras.layers import Input, Convolution2D, Activation, Flatten, Dense, Reshape, Lambda from keras.layers.advanced_activations import PReLU from keras.engine.topology import merge from keras.optimizers import Nadam import keras.backend.tensorflow_backend as K from eva.layers.residual_block import ResidualBlockList from eva.layers.masked_convolution2d import MaskedConvolution2D def PixelCNN(input_shape, filters, blocks, build=True): width, height, channels = input_shape input_map = Input(shape=input_shape, name='input_map') model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same')(input_map) model = ResidualBlockList(model, filters, blocks) model = PReLU()(model) model = MaskedConvolution2D(filters, 1, 1)(model) model = PReLU()(model) model = MaskedConvolution2D(3*256, 1, 1)(model) # TODO: Make it scalable to any amount of channels. model = Reshape((input_shape[0], input_shape[1], 256, input_shape[2]))(model) # TODO: Make it scalable to any amount of channels. red = Lambda(lambda x: x[:, :, :, :, 0])(model) red = Reshape((input_shape[0] * input_shape[1], 256))(red) red = Activation('softmax', name='red')(red) green = Lambda(lambda x: x[:, :, :, :, 1])(model) green = Reshape((input_shape[0] * input_shape[1], 256))(green) green = Activation('softmax', name='green')(green) blue = Lambda(lambda x: x[:, :, :, :, 2])(model) blue = Reshape((input_shape[0] * input_shape[1], 256))(blue) blue = Activation('softmax', name='blue')(blue) # TODO: Make is scalable to any amount of channels. if build: model = Model(input=input_map, output=[red, green, blue]) model.compile(optimizer=Nadam(), loss={ 'red': 'sparse_categorical_crossentropy', 'green': 'sparse_categorical_crossentropy', 'blue': 'sparse_categorical_crossentropy'}) return model
Python
0
@@ -508,16 +508,72 @@ _shape%0A%0A + # TODO: Make it scalable to any amount of channels.%0A inpu @@ -699,16 +699,35 @@ e='same' +, name='masked2d_A' )(input_ @@ -766,15 +766,8 @@ ist( -model, filt @@ -778,16 +778,23 @@ blocks) +(model) %0A mod @@ -943,74 +943,46 @@ 1, 1 -)(model)%0A%0A # TODO: Make it scalable to any amount of channels.%0A +, name='channels_mult_palette')(model) %0A @@ -1051,16 +1051,41 @@ hape%5B2%5D) +, name='palette_channels' )(model) @@ -1186,16 +1186,36 @@ :, :, 0%5D +, name='red_extract' )(model) @@ -1271,16 +1271,39 @@ 1%5D, 256) +, name='hw_red-palette' )(red)%0A @@ -1396,16 +1396,38 @@ :, :, 1%5D +, name='green_extract' )(model) @@ -1485,16 +1485,41 @@ 1%5D, 256) +, name='hw_green-palette' )(green) @@ -1619,16 +1619,37 @@ :, :, 2%5D +, name='blue_extract' )(model) @@ -1706,16 +1706,40 @@ 1%5D, 256) +, name='hw_blue-palette' )(blue)%0A
79b258dedda23c9130dda6d4d674f077ac52683e
Add select field for food type
app/main/forms.py
app/main/forms.py
import datetime from dateutil.parser import parse from flask.ext.wtf import Form from wtforms import StringField, TextAreaField, BooleanField, SelectField,\ DateTimeField, SubmitField from wtforms.validators import Required, Length, Email from wtforms import ValidationError from ..models import User, Event, Location from autocomplete.forms import AutocompleteField def get_loc_by_id(id): loc = Location.query.filter_by(id=id).first() return loc class EditProfileForm(Form): text_updates = BooleanField('Send notifications through text') phone = StringField('Phone Number (To recieve event notifications)') location = AutocompleteField('School', url='auth.autocomplete', get_label='name', getter=get_loc_by_id, validators=[Required()] ) submit = SubmitField('Submit') def validate_phone(self, field): if field.data != '' and User.query.filter_by(phone=num).first(): raise ValidationError('That number is already in use.') class MakeEventForm(Form): name = StringField('What is the event?', validators=[Required()]) serving = StringField('What is being offered?', default='Pizza!') place = StringField('Where is this happening (Building/room)?', validators=[Required()]) now = datetime.datetime.now()#.strftime('%m-%d %H:%M') time = DateTimeField('When is this happening?', default=now, format='%m/%d %I:%M%p') body = StringField('Anything else we should know?') submit = SubmitField('Submit') def validate_time(self, field): pass #if field.data < datetime.datetime.now(): # raise ValidationError('Time must be in the future') class SchoolSearchForm(Form): location = AutocompleteField( url='main.autocomplete', placeholder='Your school...', get_label='name', getter=get_loc_by_id, validators=[Required()] )
Python
0.000001
@@ -1083,24 +1083,499 @@ )%0A -serving = String +food_types = %5B(%22Fruit%22,%22Fruit%22), (%22Lemonade%22,%22Lemonade%22), (%22Breakfast%22,%22Breakfast%22), (%22Meat%22,%22Meat%22), (%22Sausage%22,%22Sausage%22), (%22Hot dogs%22,%22Hot dogs%22),%0A (%22Burgers%22,%22Burgers%22), (%22Candy%22,%22Candy%22), (%22Ice cream%22,%22Ice cream%22), (%22Drinks%22,%22Drinks%22), (%22Soup%22,%22Soup%22), (%22Alcohol%22,%22Alcohol%22), (%22Pizza%22,%22Pizza%22),%0A (%22Chicken%22,%22Chicken%22), (%22Fish%22,%22Fish%22), (%22Cake%22,%22Cake%22), (%22BBQ%22,%22BBQ%22), (%22Formal dinner%22,%22Formal dinner%22), (%22Smoothie%22,%22Smoothie%22), (%22Coffee%22,%22Coffee%22),%0A (%22Tea%22,%22Tea%22)%5D%0A serving = Select Fiel @@ -1606,24 +1606,26 @@ ?', -default='Pizza!' +choices=food_types )%0A
82394b0312c880402b8dcf28a4c973c2e017e3ff
add featured flag to json repr
proso_subscription/models.py
proso_subscription/models.py
from datetime import datetime

from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.utils.timezone import now
from gopay.enums import PaymentStatus
from gopay_django_api.models import Payment
from gopay_django_api.signals import payment_changed
from proso.django.models import disable_for_loaddata
from proso_user.models import Session

import uuid


class SubscriptionPlanManager(models.Manager):

    def prepare_related(self):
        return self.prefetch_related('descriptions')


class SubscriptionPlan(models.Model):
    identifier = models.SlugField()
    months_validity = models.IntegerField()
    type = models.CharField(max_length=255)
    active = models.BooleanField(default=True)
    featured = models.BooleanField(default=False)

    objects = SubscriptionPlanManager()

    def to_json(self, nested=False, lang=None):
        result = {
            'identifier': self.identifier,
            'id': self.id,
            'object_type': 'subscription_plan',
            'type': self.type,
            'active': self.active,
            'months-validity': self.months_validity,
        }
        if not nested:
            if lang is None:
                result['descriptions'] = [d.to_json(nested=True) for d in self.descriptions.all()]
            else:
                result['description'] = [d.to_json(nested=True) for d in self.descriptions.all() if d.lang == lang][0]
        return result


class SubscriptionPlanDescriptionManager(models.Manager):

    def prefetch_related(self):
        return self.select_related('plan')


class SubscriptionPlanDescription(models.Model):
    plan = models.ForeignKey(SubscriptionPlan, related_name='descriptions')
    lang = models.CharField(max_length=2)
    name = models.TextField()
    description = models.TextField()
    price = models.IntegerField()
    currency = models.CharField(max_length=10)

    objects = SubscriptionPlanDescriptionManager()

    def to_json(self, nested=False):
        result = {
            'id': self.id,
            'object_type': 'subscription_plan_description',
            'lang': self.lang,
            'price': self.price,
            'currency': self.currency,
            'description': self.description,
            'name': self.name,
        }
        if nested:
            result['plan_id'] = self.plan_id
        else:
            result['plan'] = self.plan.to_json(nested=True)
        return result


class SubscriptionManager(models.Manager):

    def prepare_related(self):
        return self.select_related('payment', 'plan_description', 'plan_description__plan')

    def subscribe(self, user, plan_description, return_url):
        payment = Payment.objects.create_single_payment(
            Payment.objects.create_contact(email=user.email),
            order_number=str(uuid.uuid1()),
            order_description=plan_description.description,
            order_items={
                plan_description.name: plan_description.price,
            },
            currency=plan_description.currency,
            amount=plan_description.price,
            return_url=return_url
        )
        return self.create(
            plan_description=plan_description,
            payment=payment,
            user=user
        )


class Subscription(models.Model):
    plan_description = models.ForeignKey(SubscriptionPlanDescription)
    payment = models.ForeignKey(Payment)
    user = models.ForeignKey(User)
    expiration = models.DateTimeField(default=now)
    created = models.DateTimeField(auto_now_add=True)
    session = models.ForeignKey(Session, null=True, blank=True, default=None)

    objects = SubscriptionManager()

    def to_json(self, nested=False):
        result = {
            'expiration': self.expiration.strftime('%Y-%m-%d %H:%M:%S'),
            'created': self.created.strftime('%Y-%m-%d %H:%M:%S'),
            'user_id': self.user_id,
            'id': self.id,
            'object_type': 'subscription_subscription',
        }
        if nested:
            result['payment_id'] = self.payment_id
            result['plan_description_id'] = self.plan_description_id
            result['session_id'] = self.session_id
        else:
            result['payment'] = {
                'id': self.payment.id,
                'object_type': 'payment',
                'state': self.payment.state,
                'status': self.payment.status,
            }
            result['plan_description'] = self.plan_description.to_json()
            if self.session is not None:
                result['session'] = self.session.to_json(nested=True)
        return result


@receiver(payment_changed)
def update_subcription_payment(sender, instance, previous_status, **kwargs):
    if previous_status['state'] == PaymentStatus.PAID or instance.state != PaymentStatus.PAID:
        return
    subscription = Subscription.objects.select_related('plan_description__plan').get(payment=instance)
    subscription.expiration = datetime.now() + relativedelta(months=subscription.plan_description.plan.months_validity)
    subscription.save()


@receiver(pre_save, sender=Subscription)
@disable_for_loaddata
def init_session(sender, instance, **kwargs):
    if instance.session_id is None:
        instance.session_id = Session.objects.get_current_session_id()
Python
0
@@ -1158,16 +1158,55 @@ f.type,%0A + 'featured': self.featured,%0A
41b3bfdbaa7540e85d74aa6d248e6214f17fe432
Converted to Python 3 code using 2to3 command
django-os2webscanner/os2webscanner/migrations/0003_organization_do_notify_all_scans.py
django-os2webscanner/os2webscanner/migrations/0003_organization_do_notify_all_scans.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('os2webscanner', '0002_auto_20160401_0817'), ] operations = [ migrations.AddField( model_name='organization', name='do_notify_all_scans', field=models.BooleanField(default=True), ), ]
Python
0.999973
@@ -21,47 +21,8 @@ -*-%0A -from __future__ import unicode_literals %0A%0Afr
724fae9315a8dd517ba486804bc82603e89cbc49
Add comment for dimension label offset
examples/image_test.py
examples/image_test.py
from pylab import *
from pyspecdata import *
import matplotlib.lines as lines

# Generate fake data
t_axis = nddata(r_[0:2:2048j], 't2')
s = exp(1j*2*pi*5*t_axis - t_axis/800e-3)
s += exp(1j*2*pi*-30*t_axis - t_axis/800e-3)
ph1 = nddata(r_[0:4]/4., 'ph1')
ph2 = nddata(r_[0, 2]/4., 'ph2')
# this cannot start at 0 since we multiply s by it
repeats = nddata(r_[1:6], 'repeats')
s *= repeats/repeats
s.add_noise(0.3)
s *= exp(1j*2*pi*ph1)
s *= exp(1j*2*pi*ph2)
#s['t2',0] *= 0.5
#s.ft('t2',shift=True)
#s.ft(['ph1','ph2'])
s.reorder(['repeats', 't2'], first=False)
print(ndshape(s))
grid_bottom = 0.2
grid_top = 0.8
total_spacing = 0.2
a_shape = ndshape(s)
divisions = []
for j, thisdim in enumerate(a_shape.dimlabels[:-2]):
    old = [j/2.0 for j in divisions]
    divisions = (old + [1])*(a_shape[thisdim]-1)+old
    print("for", thisdim, "I get", divisions)
divisions = [j*total_spacing/sum(divisions) for j in divisions]
axes_height = (grid_top-grid_bottom-total_spacing)/prod(a_shape.shape[:-2])
axes_bottom = np.cumsum([axes_height+j for j in divisions])  # becomes ndarray
axes_bottom = r_[0, axes_bottom]
axes_bottom += grid_bottom
axes_top = grid_bottom + grid_top
fig = figure()
ax_list = []
yMajorLocator = lambda: mticker.MaxNLocator(steps=[1, 2, 5, 10])
majorLocator = lambda: mticker.MaxNLocator(min_n_ticks=4, steps=[1, 2, 5, 10])
minorLocator = lambda: mticker.AutoMinorLocator(n=5)
for j, b in enumerate(axes_bottom):
    ax_list.append(axes([0.2, b, 0.7, axes_height]))  # lbwh
    if j == 0:
        #ax_list[-1].set_xlabel(a_shape.dimlabels[-1])
        ax_list[-1].xaxis.set_major_locator(majorLocator())
        ax_list[-1].xaxis.set_minor_locator(minorLocator())
    elif (j == len(axes_bottom)-1):
        ax_list[-1].xaxis.set_major_locator(majorLocator())
        ax_list[-1].set_xlabel(None)
        #for the minor ticks, use no labels; default NullFormatter
        ax_list[-1].xaxis.set_minor_locator(minorLocator())
        ax_list[-1].xaxis.tick_top()
        labels = [item.get_text() for item in ax_list[-1].get_xticklabels()]
        empty_string_labels = ['']*len(labels)
        ax_list[-1].set_xticklabels(empty_string_labels)
        ax_list[-1].set_xlabel(None)
    else:
        ax_list[-1].xaxis.set_ticks([])
        ax_list[-1].get_xaxis().set_visible(False)
        ax_list[-1].set_xlabel(None)
    ax_list[-1].set_ylabel(a_shape.dimlabels[-2])
    ax_list[-1].yaxis.set_minor_locator(minorLocator())
    ax_list[-1].yaxis.set_ticks_position('both')
A = s.smoosh(a_shape.dimlabels[:-2], 'smooshed', noaxis=True)
A.reorder('smooshed', first=True)
for j in range(len(ax_list)):
    image(A['smooshed', j], ax=ax_list[j])
    if not j == 0:
        ax_list[j].set_xlabel(None)
# to drop into ax_list, just do
# A.smoosh(a_shape.dimlabels, 'smooshed', noaxis=True)
# in ax_list[0] put A['smooshed',0], etc
idx = nddata(r_[0:prod(a_shape.shape[:-2])], [-1], ['smooshed'])
idx.chunk('smooshed', a_shape.dimlabels[:-2], a_shape.shape[:-2])
def draw_span(ax1, ax2, label, this_label_num,
              allow_for_text=10, allow_for_ticks=50):
    x1, y1 = ax1.transAxes.transform(r_[0, 1])
    x2, y2 = ax2.transAxes.transform(r_[0, 0])
    x1 -= allow_for_ticks
    x_text = x1 - allow_for_ticks
    x2 -= allow_for_ticks
    label_spacing = this_label_num*40
    x1, y1 = fig.transFigure.inverted().transform(r_[x1+label_spacing, y1])
    x_text, _ = fig.transFigure.inverted().transform(r_[x_text+label_spacing, 0])
    x2, y2 = fig.transFigure.inverted().transform(r_[x2+label_spacing, y2])
    lineA = lines.Line2D([x1, x2], [y1, y2],
                         linewidth=3, color='r',
                         transform=fig.transFigure, clip_on=False)
    text(x_text, (y2+y1)/2, label, va='center', ha='right', rotation=90,
         transform=fig.transFigure, color='r')
    fig.add_artist(lineA)
dim_index_list_rev = r_[0:len(a_shape.dimlabels[:-2])][::-1]
for dim_index, thisdim in enumerate(a_shape.dimlabels[:-2]):
    # generate labels for the dimensions, outside in
    # use definition of idx in code
    this_dim_index = dim_index_list_rev[dim_index]
    for j in range(a_shape[thisdim]):
        first_axes = ax_list[idx[thisdim, j].data.ravel()[0]]
        last_axes = ax_list[idx[thisdim, j].data.ravel()[-1]]
        print(first_axes)
        print(last_axes)
        draw_span(first_axes, last_axes, "%s=%d" % (thisdim, j),
                  this_label_num=this_dim_index)
show();quit()
Python
0
@@ -3194,24 +3194,96 @@ w_for_ticks%0A + # following line to create an offset for different dimension labels%0A label_sp @@ -4231,59 +4231,8 @@ 1%5D%5D%0A - print(first_axes)%0A print(last_axes)%0A
060b124888953d302d8611c459f978ef9201ae96
update g95 version pattern
numpy/distutils/fcompiler/g95.py
numpy/distutils/fcompiler/g95.py
# http://g95.sourceforge.net/ import os import sys from numpy.distutils.cpuinfo import cpu from numpy.distutils.fcompiler import FCompiler class G95FCompiler(FCompiler): compiler_type = 'g95' version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*' # $ g95 --version # G95 (GCC 4.0.3 (g95!) May 22 2006) executables = { 'version_cmd' : ["g95", "--version"], 'compiler_f77' : ["g95", "-ffixed-form"], 'compiler_fix' : ["g95", "-ffixed-form"], 'compiler_f90' : ["g95"], 'linker_so' : ["g95","-shared"], 'archiver' : ["ar", "-cr"], 'ranlib' : ["ranlib"] } pic_flags = ['-fpic'] module_dir_switch = '-fmod=' module_include_switch = '-I' def get_flags(self): return ['-fno-second-underscore'] def get_flags_opt(self): return ['-O'] def get_flags_debug(self): return ['-g'] if __name__ == '__main__': from distutils import log log.set_verbosity(2) from numpy.distutils.fcompiler import new_fcompiler #compiler = new_fcompiler(compiler='g95') compiler = G95FCompiler() compiler.customize() print compiler.get_version()
Python
0
@@ -193,16 +193,17 @@ = 'g95'%0A +# vers @@ -287,17 +287,16 @@ *)%5C).*'%0A -%0A # $ @@ -351,16 +351,195 @@ 2006)%0A%0A + version_pattern = r'G95 %5C((GCC (?P%3Cgccversion%3E%5B%5Cd.%5D+)%7C.*?) %5C(g95 (?P%3Cversion%3E.*)!%5C) (?P%3Cdate%3E.*)%5C).*'%0A # $ g95 --version%0A # G95 (GCC 4.0.3 (g95 0.90!) Aug 22 2006)%0A %0A%0A exec
594da79c98f360331613f786918f08957c39d13c
apply train_hook.gradients()
hypergan/trainers/simultaneous_trainer.py
hypergan/trainers/simultaneous_trainer.py
import tensorflow as tf import numpy as np import hyperchamber as hc import inspect from hypergan.trainers.base_trainer import BaseTrainer TINY = 1e-12 class SimultaneousTrainer(BaseTrainer): """ Steps G and D simultaneously """ def _create(self): gan = self.gan config = self.config if hasattr(self, 'loss'): loss = self.loss else: loss = self.gan.loss d_loss, g_loss = loss.sample self.d_log = -tf.log(tf.abs(d_loss+TINY)) self.d_loss = d_loss self.g_loss = g_loss self.step_ops = None config.optimizer["loss"] = loss.sample self.optimizer = self.gan.create_optimizer(config.optimizer) d_vars = self.d_vars or self.gan.d_vars() g_vars = self.g_vars or self.gan.g_vars() if self.gan.distribution_strategy is not None: return d_grads = tf.gradients(d_loss, d_vars) g_grads = tf.gradients(g_loss, g_vars) apply_vec = list(zip((d_grads + g_grads), (d_vars + g_vars))).copy() for grad, v in apply_vec: if grad is None: print("Gradient is None:", v) self.gan.gradient_mean = sum([tf.reduce_mean(tf.abs(grad)) for grad in d_grads+g_grads])/len(d_grads+g_grads) self.g_loss = g_loss self.d_loss = d_loss self.gan.trainer = self self.optimize_t = self.optimizer.apply_gradients(apply_vec) def required(self): return "".split() def _step(self, feed_dict): gan = self.gan sess = gan.session config = self.config loss = gan.loss metrics = gan.metrics() d_loss, g_loss = loss.sample self.before_step(self.current_step, feed_dict) if self.step_ops is None: ops = [self.optimize_t] update_train_hooks = [t.update_op() for t in self.train_hooks] update_train_hooks = [op for op in update_train_hooks if op is not None] self.step_ops = ops + update_train_hooks sess.run(self.step_ops, feed_dict) self.after_step(self.current_step, feed_dict) if self.current_step % 10 == 0: metric_values = self.gan.session.run(self.output_variables(metrics)) self.print_metrics(self.current_step) def print_metrics(self, step): metrics = self.gan.metrics() metric_values = self.gan.session.run(self.output_variables(metrics)) print(str(self.output_string(metrics) % tuple([step] + metric_values)))
Python
0
@@ -1176,116 +1176,171 @@ -self.gan.gradient_mean = sum(%5Btf.reduce_mean(tf.abs(grad)) for grad in +for t in self.train_hooks:%0A d_grads, g_grads = t.gradients(d_grads, g_grads)%0A apply_vec = list(zip(( d_grads -+ + + g_grads -%5D)/len(d_grads+g_grads +), (d_vars + g_vars))).copy( )%0A
adfe91d2f6066d8f28aeca9574465be452fcd20e
Correct excel file extension
octoprint_printhistory/export.py
octoprint_printhistory/export.py
# coding=utf-8 __author__ = "Jarek Szczepanski <imrahil@imrahil.com>" __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html" __copyright__ = "Copyright (C) 2014 Jarek Szczepanski - Released under terms of the AGPLv3 License" def exportHistoryData(self, exportType): import flask import csv import StringIO history_dict = self._getHistoryDict() if history_dict is not None: si = StringIO.StringIO() headers = ['File name', 'Timestamp', 'Success', 'Print time', 'Filament length', 'Filament volume'] if exportType == 'csv': writer = csv.writer(si, quoting=csv.QUOTE_ALL) writer.writerow(headers) for historyDetails in history_dict: output = list() output.append(historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-") output.append(historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-") output.append(historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-") output.append(historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-") output.append(historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-") output.append(historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-") writer.writerow(output); response = flask.make_response(si.getvalue()) response.headers["Content-type"] = "text/csv" response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.csv" elif exportType == 'excel': import xlsxwriter workbook = xlsxwriter.Workbook(si) worksheet = workbook.add_worksheet() col = 0 for header in headers: worksheet.write(0, col, header) col += 1 row = 1 for historyDetails in history_dict: worksheet.write(row, 0, (historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-")) worksheet.write(row, 1, (historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-")) worksheet.write(row, 2, (historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-")) worksheet.write(row, 3, (historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-")) worksheet.write(row, 4, (historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-")) worksheet.write(row, 5, (historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-")) row += 1 workbook.close() response = flask.make_response(si.getvalue()) response.headers["Content-type"] = "application/vnd.ms-excel" response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.xls" return response else: return flask.make_response("No history file", 400)
Python
0
@@ -3618,16 +3618,17 @@ port.xls +x %22%0D%0A%0D%0A
ee330d0b0092e1a8fe7c25cbe170973647d59baa
enable staff with clearance to create own office hours
web/impact/impact/v1/serializers/office_hours_serializer.py
web/impact/impact/v1/serializers/office_hours_serializer.py
from datetime import timedelta from rest_framework.serializers import ( ModelSerializer, ValidationError, ) from accelerator_abstract.models.base_user_utils import is_employee from accelerator.models import ( MentorProgramOfficeHour, UserRole ) from .location_serializer import LocationSerializer from .user_serializer import UserSerializer INVALID_END_DATE = 'office hour end time must be later than the start time' INVALID_USER = ('must be of type Mentor or Alumni in residence ' 'in an active program') INVALID_SESSION_DURATION = 'Please specify a duration of 30 minutes or more.' THIRTY_MINUTES = timedelta(minutes=30) NO_START_DATE_TIME = "start_date_time must be specified" NO_END_DATE_TIME = "end_date_time must be specified" class OfficeHourSerializer(ModelSerializer): class Meta: model = MentorProgramOfficeHour fields = [ 'id', 'mentor', 'start_date_time', 'end_date_time', 'topics', 'description', 'location', ] def validate(self, attrs): start_date_time = None end_date_time = None if self.instance is not None: start_date_time = self.instance.start_date_time end_date_time = self.instance.end_date_time start_date_time = attrs.get('start_date_time') or start_date_time end_date_time = attrs.get('end_date_time') or end_date_time if not start_date_time: raise ValidationError({ 'start_date_time': NO_START_DATE_TIME}) if not end_date_time: raise ValidationError({ 'end_date_time': NO_END_DATE_TIME}) if start_date_time > end_date_time: raise ValidationError({ 'end_date_time': INVALID_END_DATE}) if end_date_time - start_date_time < THIRTY_MINUTES: raise ValidationError({ 'end_date_time': INVALID_SESSION_DURATION}) return attrs def validate_mentor(self, mentor): user = self.context['request'].user if not is_employee(user): return user roles = [UserRole.MENTOR, UserRole.AIR] is_allowed_mentor = mentor.programrolegrant_set.filter( program_role__user_role__name__in=roles, program_role__program__program_status='active', ).exists() if not is_allowed_mentor: raise ValidationError(INVALID_USER) return mentor def to_representation(self, instance): data = super().to_representation(instance) data['mentor'] = UserSerializer(instance.mentor).data data['location'] = LocationSerializer(instance.location).data return data
Python
0
@@ -110,16 +110,100 @@ rror,%0A)%0A +from accelerator_abstract.models.base_clearance import (%0A CLEARANCE_LEVEL_STAFF%0A) %0Afrom ac @@ -535,16 +535,34 @@ ('must +have clearance or be of ty @@ -588,18 +588,8 @@ in -residence '%0A @@ -603,16 +603,26 @@ ' +residence in an ac @@ -1354,24 +1354,16 @@ te_time%0A - %0A @@ -1739,24 +1739,16 @@ _TIME%7D)%0A - %0A @@ -2051,24 +2051,16 @@ n attrs%0A - %0A def @@ -2060,24 +2060,26 @@ def -validate +is_allowed _mentor( @@ -2101,16 +2101,22 @@ +staff_ user = s @@ -2155,133 +2155,278 @@ -if not is_employee(user):%0A return user%0A roles = %5BUserRole.MENTOR, UserRole.AIR%5D%0A is_allowed_mentor = +roles = %5BUserRole.MENTOR, UserRole.AIR%5D%0A if staff_user == mentor:%0A return staff_user.clearances.filter(%0A level=CLEARANCE_LEVEL_STAFF,%0A program_family__programs__program_status='active'%0A ).exists()%0A return men @@ -2590,31 +2590,178 @@ xists()%0A - if not +%0A def validate_mentor(self, mentor):%0A user = self.context%5B'request'%5D.user%0A if not is_employee(user):%0A return user%0A if not self. is_allow @@ -2765,24 +2765,32 @@ lowed_mentor +(mentor) :%0A
430810f64cc274490027d0843b67f8ceee79db52
Fix typo (#14802)
erpnext/hr/doctype/employee_advance/employee_advance.py
erpnext/hr/doctype/employee_advance/employee_advance.py
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe, erpnext
from frappe import _
from frappe.model.document import Document
from frappe.utils import flt, nowdate

class EmployeeAdvanceOverPayment(frappe.ValidationError):
    pass

class EmployeeAdvance(Document):
    def onload(self):
        self.get("__onload").make_payment_via_journal_entry = frappe.db.get_single_value(
            'Accounts Settings', 'make_payment_via_journal_entry')

    def validate(self):
        self.set_status()
        self.validate_employee_advance_account()

    def on_cancel(self):
        self.set_status()

    def set_status(self):
        if self.docstatus == 0:
            self.status = "Draft"
        if self.docstatus == 1:
            if self.claimed_amount and flt(self.claimed_amount) == flt(self.paid_amount):
                self.status = "Claimed"
            elif self.paid_amount and self.advance_amount == flt(self.paid_amount):
                self.status = "Paid"
            else:
                self.status = "Unpaid"
        elif self.docstatus == 2:
            self.status = "Cancelled"

    def validate_employee_advance_account(self):
        company_currency = erpnext.get_company_currency(self.company)
        if (self.advance_account and
                company_currency != frappe.db.get_value('Account', self.advance_account, 'account_currency')):
            frappe.throw(_("Advance account currency should be same as company curreny {0}")
                .format(company_currency))

    def set_total_advance_paid(self):
        paid_amount = frappe.db.sql("""
            select ifnull(sum(debit_in_account_currency), 0) as paid_amount
            from `tabGL Entry`
            where against_voucher_type = 'Employee Advance'
                and against_voucher = %s
                and party_type = 'Employee'
                and party = %s
        """, (self.name, self.employee), as_dict=1)[0].paid_amount

        if flt(paid_amount) > self.advance_amount:
            frappe.throw(_("Row {0}# Paid Amount cannot be greater than requested advance amount"),
                EmployeeAdvanceOverPayment)

        self.db_set("paid_amount", paid_amount)
        self.set_status()
        frappe.db.set_value("Employee Advance", self.name, "status", self.status)

    def update_claimed_amount(self):
        claimed_amount = frappe.db.sql("""
            select sum(ifnull(allocated_amount, 0))
            from `tabExpense Claim Advance`
            where employee_advance = %s and docstatus=1 and allocated_amount > 0
        """, self.name)[0][0]

        frappe.db.set_value("Employee Advance", self.name, "claimed_amount", claimed_amount)

@frappe.whitelist()
def get_due_advance_amount(employee, posting_date):
    employee_due_amount = frappe.get_all("Employee Advance",
        filters={"employee": employee, "docstatus": 1, "posting_date": ("<=", posting_date)},
        fields=["advance_amount", "paid_amount"])
    return sum([(emp.advance_amount - emp.paid_amount) for emp in employee_due_amount])

@frappe.whitelist()
def make_bank_entry(dt, dn):
    from erpnext.accounts.doctype.journal_entry.journal_entry import get_default_bank_cash_account
    doc = frappe.get_doc(dt, dn)
    payment_account = get_default_bank_cash_account(doc.company, account_type="Cash",
        mode_of_payment=doc.mode_of_payment)

    je = frappe.new_doc("Journal Entry")
    je.posting_date = nowdate()
    je.voucher_type = 'Bank Entry'
    je.company = doc.company
    je.remark = 'Payment against Employee Advance: ' + dn + '\n' + doc.purpose

    je.append("accounts", {
        "account": doc.advance_account,
        "debit_in_account_currency": flt(doc.advance_amount),
        "reference_type": "Employee Advance",
        "reference_name": doc.name,
        "party_type": "Employee",
        "party": doc.employee,
        "is_advance": "Yes"
    })

    je.append("accounts", {
        "account": payment_account.account,
        "credit_in_account_currency": flt(doc.advance_amount),
        "account_currency": payment_account.account_currency,
        "account_type": payment_account.account_type
    })

    return je.as_dict()
Python
0.002483
@@ -1409,16 +1409,17 @@ y curren +c y %7B0%7D%22)%0A
7e8e5ceb765189974bcaee86e15c26b94ac05f3a
Update modulation.py
examples/modulation.py
examples/modulation.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import initExample import os from lase.core import KClient # Driver to use from lase.drivers import Oscillo # Modules to import import numpy as np import matplotlib.pyplot as plt import time # Connect to Lase host = os.getenv('HOST','192.168.1.100') client = KClient(host) driver = Oscillo(client) # Replace with appropriate driver # Enable laser driver.start_laser() # Set laser current current = 30 # mA driver.set_laser_current(current) # Modulation on DAC amp_mod = 0.2 freq_mod = 1e6 driver.dac[1, :] = amp_mod*np.sin(2 * np.pi * freq_mod * driver.sampling.t) driver.set_dac() # Signal on ADC driver.get_adc() signal = driver.adc[0, :] # Plot plt.plot(driver.sampling.t, signal) plt.show() # Plot psd_signal = np.abs(np.fft.fft(signal)) ** 2 plt.semilogy(1e-6 * np.fft.fftshift(driver.sampling.f_fft), np.fft.fftshift(psd_signal)) plt.xlabel('Frequency (MHz)') plt.show() # Disable laser driver.stop_laser() driver.close()
Python
0.000001
@@ -240,23 +240,35 @@ %0A%0A# -Connect to Lase +Load the oscillo instrument %0Ahos @@ -305,16 +305,169 @@ 1.100')%0A +password = os.getenv('PASSWORD','changeme')%0Assh = ZynqSSH(host, password)%0Assh.unzip_app()%0Assh.install_instrument('oscillo')%0A%0A# Connect to the instrument%0A client = @@ -509,43 +509,8 @@ ent) - # Replace with appropriate driver %0A%0A#
011ee4cb3d83e9a83e490b2cfe4d5ee77b81bf8b
read transport_class from flask settings
opbeat/contrib/flask/__init__.py
opbeat/contrib/flask/__init__.py
""" opbeat.contrib.flask ~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2011-2012 Opbeat Large portions are :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import logging import os import warnings from flask import request, signals import opbeat.instrumentation.control from opbeat.base import Client from opbeat.conf import setup_logging from opbeat.contrib.flask.utils import get_data_from_request from opbeat.handlers.logging import OpbeatHandler from opbeat.utils import disabled_due_to_debug, get_name_from_func from opbeat.utils.deprecation import deprecated logger = logging.getLogger('opbeat.errors.client') def make_client(client_cls, app, organization_id=None, app_id=None, secret_token=None): opbeat_config = app.config.get('OPBEAT', {}) # raise a warning if OPBEAT_ORGANIZATION_ID is set in the config, but not # ORGANIZATION_ID. Until 1.3.1, we erroneously checked only # OPBEAT_ORGANIZATION_ID if ('OPBEAT_ORGANIZATION_ID' in opbeat_config and 'ORGANIZATION_ID' not in opbeat_config): warnings.warn( 'Please use ORGANIZATION_ID to set the opbeat ' 'organization id your configuration', DeprecationWarning, ) # raise a warning if APP_ID is set in the environment, but not OPBEAT_APP_ID # Until 1.3.1, we erroneously checked only APP_ID if 'APP_ID' in os.environ and 'OPBEAT_APP_ID' not in os.environ: warnings.warn( 'Please use OPBEAT_APP_ID to set the opbeat ' 'app id in the environment', DeprecationWarning, ) # raise a warning if SECRET_TOKEN is set in the environment, but not # OPBEAT_SECRET_TOKEN. Until 1.3.1, we erroneously checked only SECRET_TOKEN if 'SECRET_TOKEN' in os.environ and 'OPBEAT_SECRET_TOKEN' not in os.environ: warnings.warn( 'Please use OPBEAT_SECRET_TOKEN to set the opbeat secret token ' 'in the environment', DeprecationWarning, ) organization_id = ( organization_id or opbeat_config.get('ORGANIZATION_ID') or # config os.environ.get('OPBEAT_ORGANIZATION_ID') or # environment opbeat_config.get('OPBEAT_ORGANIZATION_ID') # deprecated fallback ) app_id = ( app_id or opbeat_config.get('APP_ID') or # config os.environ.get('OPBEAT_APP_ID') or # environment os.environ.get('APP_ID') # deprecated fallback ) secret_token = ( secret_token or opbeat_config.get('SECRET_TOKEN') or # config os.environ.get('OPBEAT_SECRET_TOKEN') or # environment os.environ.get('SECRET_TOKEN') # deprecated fallback ) return client_cls( organization_id=organization_id, app_id=app_id, secret_token=secret_token, include_paths=set(opbeat_config.get('INCLUDE_PATHS', [])) | set([app.import_name]), exclude_paths=opbeat_config.get('EXCLUDE_PATHS'), servers=opbeat_config.get('SERVERS'), hostname=opbeat_config.get('HOSTNAME'), auto_log_stacks=opbeat_config.get('AUTO_LOG_STACKS'), timeout=opbeat_config.get('TIMEOUT'), string_max_length=opbeat_config.get('STRING_MAX_LENGTH'), list_max_length=opbeat_config.get('LIST_MAX_LENGTH'), traces_freq_send=opbeat_config.get('TRACES_FREQ_SEND'), processors=opbeat_config.get('PROCESSORS'), async_mode=opbeat_config.get('ASYNC') ) class Opbeat(object): """ Flask application for Opbeat. 
Look up configuration from ``os.environ['OPBEAT_ORGANIZATION_ID']``, ``os.environ.get('OPBEAT_APP_ID')`` and ``os.environ.get('OPBEAT_SECRET_TOKEN')``:: >>> opbeat = Opbeat(app) Pass an arbitrary ORGANIZATION_ID, APP_ID and SECRET_TOKEN:: >>> opbeat = Opbeat(app, organiation_id='1', app_id='1', secret_token='asdasdasd') Pass an explicit client:: >>> opbeat = Opbeat(app, client=client) Automatically configure logging:: >>> opbeat = Opbeat(app, logging=True) Capture an exception:: >>> try: >>> 1 / 0 >>> except ZeroDivisionError: >>> opbeat.capture_exception() Capture a message:: >>> opbeat.captureMessage('hello, world!') """ def __init__(self, app=None, organization_id=None, app_id=None, secret_token=None, client=None, client_cls=Client, logging=False): self.organization_id = organization_id self.app_id = app_id self.secret_token = secret_token self.logging = logging self.client_cls = client_cls self.client = client if app: self.init_app(app) def handle_exception(self, *args, **kwargs): if not self.client: return if disabled_due_to_debug( self.app.config.get('OPBEAT', {}), self.app.config.get('DEBUG', False) ): return self.client.capture( 'Exception', exc_info=kwargs.get('exc_info'), data=get_data_from_request(request), extra={ 'app': self.app, }, ) def init_app(self, app): self.app = app if not self.client: self.client = make_client( self.client_cls, app, self.organization_id, self.app_id, self.secret_token, ) if self.logging: setup_logging(OpbeatHandler(self.client)) signals.got_request_exception.connect(self.handle_exception, sender=app, weak=False) # Instrument to get traces skip_env_var = 'SKIP_INSTRUMENT' if skip_env_var in os.environ: logger.debug("Skipping instrumentation. %s is set.", skip_env_var) else: opbeat.instrumentation.control.instrument() signals.request_started.connect(self.request_started) signals.request_finished.connect(self.request_finished) def request_started(self, app): self.client.begin_transaction("web.flask") def request_finished(self, app, response): rule = request.url_rule.rule if request.url_rule is not None else "" self.client.end_transaction(rule, response.status_code) def capture_exception(self, *args, **kwargs): assert self.client, 'capture_exception called before application configured' return self.client.capture_exception(*args, **kwargs) def capture_message(self, *args, **kwargs): assert self.client, 'capture_message called before application configured' return self.client.capture_message(*args, **kwargs) @deprecated(alternative="capture_exception()") def captureException(self, *args, **kwargs): return self.capture_exception(*args, **kwargs) @deprecated(alternative="capture_message()") def captureMessage(self, *args, **kwargs): return self.capture_message(*args, **kwargs)
Python
0
@@ -3093,24 +3093,92 @@ 'SERVERS'),%0A + transport_class=opbeat_config.get('TRANSPORT_CLASS', None),%0A host
d876ce9e7b50b6e5f1161a937097f00b6fafe09f
update test
myideas/core/tests/test_views.py
myideas/core/tests/test_views.py
from django.test import TestCase from django.shortcuts import resolve_url as r from django.contrib.auth import get_user_model from myideas.core.forms import IdeasForm from myideas.core.models import Ideas class HomeTest(TestCase): def setUp(self): self.response = self.client.get(r('home')) def test_get(self): """GET 'Home' must return status code 200""" self.assertEqual(200, self.response.status_code) def test_template(self): """'Home' must use template index.html and base.html""" self.assertTemplateUsed(self.response, 'index.html') self.assertTemplateUsed(self.response, 'base.html') def test_login_link(self): """base.html navbar must contains login page link""" expected = 'href="{}"'.format(r('auth_login')) self.assertContains(self.response, expected) def test_register_link(self): """base.html navbar must contains register page link""" expected = 'href="{}"'.format(r('registration_register')) self.assertContains(self.response, expected) def test_ideas_form_link(self): """base.html navbar contains ideas_form link""" expected = 'href="{}"'.format(r('ideas_form')) self.assertContains(self.response, expected) class DetailsTest(TestCase): def setUp(self): user = get_user_model().objects.create(username='adminapp') self.idea = Ideas.objects.create(user=user, title='test app') self.response = self.client.get(r(self.idea.get_absolute_url())) def test_get(self): """GET 'Ideas Details' must return status code 200""" self.assertEqual(200, self.response.status_code) def test_template(self): """'Ideas Details' must use template ideas_details.html and base.html""" self.assertTemplateUsed(self.response, 'ideas_details.html') self.assertTemplateUsed(self.response, 'base.html') class ProfileTest(TestCase): def setUp(self): user = get_user_model().objects.create(username='adminapp') self.idea = Ideas.objects.create(user=user) self.response = self.client.get(r('profile', self.idea.user)) def test_get(self): """GET 'User Profile' must return status code 200""" self.assertEqual(200, self.response.status_code) def test_template(self): """'User Profile' must use template profile.html and base.html""" self.assertTemplateUsed(self.response, 'profile.html') self.assertTemplateUsed(self.response, 'base.html') class IdeaFormTest(TestCase): def setUp(self): self.response = self.client.get(r('ideas_form')) def test_get(self): """GET 'Ideas Form' must return status code 200""" self.assertEqual(200, self.response.status_code) def test_has_form_on_context(self): self.assertIsInstance(self.response.context['form'], IdeasForm) def test_template(self): """'Ideas Form' must use template index.html and base.html""" self.assertTemplateUsed(self.response, 'idea_form.html') self.assertTemplateUsed(self.response, 'base.html') class IdeasDetailNotFound(TestCase): def setUp(self): self.response = self.client.get(r('ideas_details.html', slug='not-found')) def test_not_found(self): self.assertEqual(404, self.response.status_code) def test_template(self): """'page not found' must use template 404.html and base.html""" self.assertTemplateUsed(self.response, '404.html') self.assertTemplateUsed(self.response, 'base.html')
Python
0.000001
@@ -119,17 +119,16 @@ r_model%0A -%0A from myi @@ -3552,28 +3552,29 @@ (self.response, 'base.html') +%0A
0aa6ab1858ccac9154a79ece6bc48a7e7172ad5a
Redefine CategorySerializer.parent
bulletin/api/serializers.py
bulletin/api/serializers.py
from django.contrib.auth.models import User
from rest_framework import serializers

from ..models import (Category,
                      Issue,
                      IssueTemplate,
                      Link,
                      Newsletter,
                      Section,
                      SectionTemplate,
                      Post,
                      AdSize,
                      Ad)


class UserSerializer(serializers.HyperlinkedModelSerializer):

    class Meta:
        model = User
        fields = ('id', 'username', 'first_name', 'last_name')


class CategorySerializer(serializers.HyperlinkedModelSerializer):
    parent = serializers.StringRelatedField(many=False,
                                            required=False,
                                            allow_null=True)
    section_templates = serializers.StringRelatedField(many=True,
                                                       required=False)

    class Meta:
        model = Category
        fields = ('id', 'name', 'parent', 'fully_qualified_name',
                  'section_templates')


class LinkSerializer(serializers.HyperlinkedModelSerializer):

    class Meta:
        model = Link
        fields = ('id', 'text', 'url')


class PostSerializer(serializers.HyperlinkedModelSerializer):
    submitter = serializers.PrimaryKeyRelatedField(many=False,
                                                   required=False,
                                                   read_only=True)
    links = LinkSerializer(many=True, required=False, read_only=True)

    class Meta:
        model = Post
        fields = ('id', 'title', 'url', 'approved', 'pub_date',
                  'submitter', 'position', 'links')


class SectionPostReorderSerializer(serializers.HyperlinkedModelSerializer):
    """Used for section-post-up, section-post-down, etc.
    """

    class Meta:
        model = Post
        fields = ('id', 'position')
        read_only_fields = ('position',)


class SectionSerializer(serializers.HyperlinkedModelSerializer):
    posts = PostSerializer(many=True, required=False, read_only=True)

    class Meta:
        model = Section
        fields = ('id', 'name', 'posts', 'position')


class IssueSectionReorderSerializer(serializers.HyperlinkedModelSerializer):
    """Used for issue-section-up, issue-section-down, etc.
    """

    class Meta:
        model = Section
        fields = ('id', 'position')
        read_only_fields = ('position',)


class IssueSerializer(serializers.HyperlinkedModelSerializer):
    sections = SectionSerializer(many=True, required=False)

    class Meta:
        model = Issue
        fields = ('id', 'pub_date', 'sections', 'name', 'subject',
                  'from_name', 'from_email', 'reply_to_email',
                  'organization_name', 'address_line_1', 'address_line_2',
                  'address_line_3', 'city', 'state', 'international_state',
                  'postal_code', 'country', 'html_template_name',
                  'text_template_name')

    def create(self, validated_data):
        sections_data = validated_data.pop('sections', [])
        issue = Issue.objects.create(**validated_data)
        for section_data in sections_data:
            Section.objects.create(issue=issue, **section_data)
        return issue


class NewsletterSerializer(serializers.HyperlinkedModelSerializer):
    issues = IssueSerializer(many=True, required=False)

    class Meta:
        model = Newsletter
        fields = ('id', 'name', 'issues')


class SectionTemplateSerializer(serializers.HyperlinkedModelSerializer):
    categories = CategorySerializer(many=True, required=False)

    class Meta:
        model = SectionTemplate
        fields = ('id', 'name', 'position', 'categories')


class IssueTemplateSerializer(serializers.HyperlinkedModelSerializer):
    section_templates = SectionTemplateSerializer(many=True, required=False)

    class Meta:
        model = IssueTemplate
        fields = ('id', 'name', 'section_templates', 'subject',
                  'from_name', 'from_email', 'reply_to_email',
                  'organization_name', 'address_line_1', 'address_line_2',
                  'address_line_3', 'city', 'state', 'international_state',
                  'postal_code', 'country', 'html_template_name',
                  'text_template_name')


class AdSizeSerializer(serializers.HyperlinkedModelSerializer):

    class Meta:
        model = AdSize
        fields = ('name', 'width', 'height')


class AdSerializer(serializers.HyperlinkedModelSerializer):
    size = serializers.PrimaryKeyRelatedField(
        queryset=AdSize.objects.all(),
        many=False,
        required=True)

    class Meta:
        model = Ad
        fields = ('name', 'start', 'end', 'size', 'url',
                  'show_on_website', 'include_in_newsletter')
Python
0
@@ -642,38 +642,42 @@ t = serializers. -String +PrimaryKey RelatedField(man @@ -665,32 +665,41 @@ KeyRelatedField( +%0A many=False,%0A @@ -706,36 +706,47 @@ - +default=None,%0A allow_null=True,%0A @@ -743,38 +743,39 @@ True,%0A re -quired +ad_only =False,%0A @@ -778,59 +778,39 @@ - allow_null=True +queryset=Category.objects.all() )%0A
0c0ff9eca91dd867ce097dfd6fe4164db8034ce1
Optimize wow character command
Discord/cogs/wow.py
Discord/cogs/wow.py
import discord from discord.ext import commands import datetime import clients import credentials from utilities import checks def setup(bot): bot.add_cog(WoW(bot)) class WoW: def __init__(self, bot): self.bot = bot @commands.group(aliases = ["worldofwarcraft", "world_of_warcraft"], invoke_without_command = True) @checks.not_forbidden() async def wow(self, ctx): '''World of Warcraft''' await ctx.invoke(self.bot.get_command("help"), ctx.invoked_with) @wow.command() @checks.not_forbidden() async def character(self, ctx, character : str, *, realm : str): '''WIP''' # get classes classes = {} url = "https://us.api.battle.net/wow/data/character/classes" params = {"apikey": ctx.bot.BATTLE_NET_API_KEY} async with clients.aiohttp_session.get(url, params = params) as resp: data = await resp.json() for wow_class in data["classes"]: classes[wow_class["id"]] = wow_class["name"] # get races races = {} url = "https://us.api.battle.net/wow/data/character/races" async with clients.aiohttp_session.get(url, params = params) as resp: data = await resp.json() for wow_race in data["races"]: races[wow_race["id"]] = wow_race["name"] # add side/faction? genders = {0: "Male", 1: "Female"} url = f"https://us.api.battle.net/wow/character/{realm}/{character}" async with clients.aiohttp_session.get(url, params = params) as resp: data = await resp.json() if resp.status != 200: await ctx.embed_reply(f":no_entry: Error: {data['reason']}") return title_url = f"http://us.battle.net/wow/en/character/{data['realm'].replace(' ', '-')}/{data['name']}/" thumbnail_url = f"http://render-us.worldofwarcraft.com/character/{data['thumbnail']}" fields = [("Level", data["level"]), ("Achievement Points", data["achievementPoints"]), ("Class", f"{classes.get(data['class'], 'Unknown')}\n" "[Talent Calculator](http://us.battle.net/wow/en/tool/talent-calculator#{data['calcClass']})"), ("Race", races.get(data["race"], "Unknown")), ("Gender", genders.get(data["gender"], "Unknown"))] timestamp = datetime.datetime.utcfromtimestamp(data["lastModified"] / 1000.0) await ctx.embed_reply(f"{data['realm']} ({data['battlegroup']})", title = data["name"], title_url = title_url, thumbnail_url = thumbnail_url, fields = fields, footer_text = "Last seen", timestamp = timestamp) # faction and total honorable kills? @wow.command() @checks.not_forbidden() async def statistics(self, ctx, character : str, *, realm : str): '''WIP''' url = f"https://us.api.battle.net/wow/character/{realm}/{character}" params = {"fields": "statistics", "apikey": ctx.bot.BATTLE_NET_API_KEY} async with clients.aiohttp_session.get(url, params = params) as resp: data = await resp.json() statistics = data["statistics"] title_url = f"http://us.battle.net/wow/en/character/{data['realm'].replace(' ', '-')}/{data['name']}/" # await ctx.embed_reply(f"{data['realm']} ({data['battlegroup']})", # title = data["name"], title_url = title_url)
Python
0.000003
@@ -1442,16 +1442,23 @@ 00:%0A%09%09%09%09 +return await ct @@ -1514,19 +1514,8 @@ %7D%22)%0A -%09%09%09%09return%0A %09%09ti
2cec24c4a870d76d725c14ed10202f419c80dfb0
Update internal box metadata AGAIN
eex/metadata/additional_metadata.py
eex/metadata/additional_metadata.py
""" Contains metadata for additional simulation details """ _box_info = { "dimensions": { "xlo": "[length]", "xhi": "[length]", "ylo": "[length]", "yhi": "[length]", "zlo": "[length]", "zhi": "[length]", "alpha": "radian", "beta": "radian", "gamma": "radian", }, "description": "This is information defines the simulation box", } _boundary = { "x": ["p", "f"], "y": ["p", "f"], "z": ["p", "f"], } _box_origin = ["origin", "center", "custom"] _electrostatics = { "ewald": { "parameters": ["alpha", "accuracy", "kmax"], "units": { "alpha": "[length]**-1", "accuracy": "dimensionless", "kmaxx": "count", "kmaxy": "count", "kmaxz": "count", }, "description": "Information for the Ewald method", }, "pme": { "parameters": ["g_ewald", "grid_size", "order", "accuracy"], "units": { "g_ewald": "[length]**-1", "grid_size_x": "dimensionless", "grid_size_y": "dimensionless", "grid_size_z": "dimensionless", "order": "dimensionless", "accuracy": "dimensionless", }, "description": "Information for the PME method", }, "pppm": { "parameters": ["g_ewald", "grid_size", "order", "accuracy"], "units": { "g_ewald": "[length]**-1", "grid_size_x": "dimensionless", "grid_size_y": "dimensionless", "grid_size_z": "dimensionless", "order": "dimensionless", "accuracy": "dimensionless", }, "description": "Information for the PPME method", }, "wolf": { "parameters": ["alpha", "accuracy"], "units": { "alpha": "[length] ** -1", }, "description": "Information for the Wolf method", }, "dsf": { "parameters": ["alpha"], "units": { "alpha": "[length] ** -1", }, "description": "Information for the DSF method", }, "reaction": { "parameters": ["A"], "units": { "A": "dimensionless", #This term depends on dielectric }, "description": "Information for the reaction method", }, "cut": { "parameters": [], "units": { }, "description": "Information for the cut method", }, "shift":{ "parameters": [], "units": { }, "description": "Information for the shift method", }, "debye": { "parameters": ["alpha"], "units": { "alpha": "[length]**-1", }, "description": "Information for the Debye method", }, "cutoff": "[length]", } _van_der_waals = { "cutoff": "[length]", "long": { "standard":{ "parameters": [], "units": {} }, "switching": { "parameters":["switching_radius"], "units": { "switching_radius": "[length]", }, }, "shift"{ "parameters": [], "units": {} }, "cut"{ "parameters": [], "units": {} }, } _mixing_rule = ["lorentz_berthelot", "geometric", "kong", "sixth_power", "custom"] _neighbor = { "verlet" = { { "skin": "[length]", "frequency": "count", }, } #_special_bonds = { #} _exclusions = { "electrostatic":{ "scale12": "dimensionless", "scale13": "dimensionless", "scale14": "dimensionless", } "van_der_waals":{ "scale12": "dimensionless", "scale13": "dimensionless", "scale14": "dimensionless", } } _torsion_convention = ["180_is_trans", "0_is_trans"] #_groups = { #}
Python
0
@@ -102,11 +102,9 @@ %22 -xlo +a %22: %22 @@ -127,11 +127,9 @@ %22 -xhi +b %22: %22 @@ -152,92 +152,9 @@ %22 -ylo%22: %22%5Blength%5D%22,%0A %22yhi%22: %22%5Blength%5D%22,%0A %22zlo%22: %22%5Blength%5D%22,%0A %22zhi +c %22: %22
2bc474b83f0e3ad340127f626cfe6597cbd2ba4e
Allow for interactive inputs
cal_pipe/manual_flagging.py
cal_pipe/manual_flagging.py
import sys ''' Plot visibility data for each spw to allow for easy manual flags ''' vis = sys.argv[1] field = sys.argv[2] corrstring = sys.argv[3] tb.open(vis + '/SPECTRAL_WINDOW') freqs = tb.getcol('REF_FREQUENCY') nchans = tb.getcol('NUM_CHAN') tb.close() spws = range(0, len(freqs)) for spw in spws: nchan = nchans[spw] print "On " + str(spw+1) + " of " + str(len(freqs)) default('plotms') vis = vis xaxis = 'channel' yaxis = 'amp' ydatacolumn = 'corrected' selectdata = True field = field spw = str(spw) correlation = corrstring averagedata = True avgtime = '1e8s' avgscan = False transform = False extendflag = False iteraxis = '' coloraxis = 'antenna2' plotrange = [] xlabel = '' ylabel = '' showmajorgrid = False showminorgrid = False plotms() raw_input("Continue?") default('plotms') vis = vis xaxis = 'channel' yaxis = 'phase' ydatacolumn = 'corrected' selectdata = True field = field spw = str(spw) correlation = corrstring averagedata = True avgtime = '1e8s' avgscan = False transform = False extendflag = False iteraxis = '' coloraxis = 'antenna2' plotrange = [] xlabel = '' ylabel = '' showmajorgrid = False showminorgrid = False plotms() raw_input("Continue?") default('plotms') vis = vis xaxis = 'time' yaxis = 'amp' ydatacolumn = 'corrected' selectdata = True field = field spw = str(spw) correlation = corrstring averagedata = True avgchannel = str(nchan) avgscan = False transform = False extendflag = False iteraxis = '' coloraxis = 'antenna2' plotrange = [] xlabel = '' ylabel = '' showmajorgrid = False showminorgrid = False plotms() raw_input("Continue?") default('plotms') vis = vis xaxis = 'time' yaxis = 'phase' ydatacolumn = 'corrected' selectdata = True field = field spw = str(spw) correlation = corrstring averagedata = True avgchannel = str(nchan) avgscan = False transform = False extendflag = False iteraxis = '' coloraxis = 'antenna2' plotrange = [] xlabel = '' ylabel = '' showmajorgrid = False showminorgrid = False plotms() raw_input("Continue?") default('plotms') vis = vis xaxis = 'uvwave' yaxis = 'amp' ydatacolumn = 'corrected' selectdata = True field = field spw = str(spw) correlation = corrstring averagedata = True avgchannel = str(nchan) avgtime = '1e8s' avgscan = False transform = False extendflag = False iteraxis = '' coloraxis = 'antenna2' plotrange = [] xlabel = '' ylabel = '' showmajorgrid = False showminorgrid = False plotms() raw_input("Continue?")
Python
0.000002
@@ -80,16 +80,25 @@ gs%0A'''%0A%0A +try:%0A vis = sy @@ -107,16 +107,20 @@ argv%5B1%5D%0A + field = @@ -131,16 +131,20 @@ argv%5B2%5D%0A + corrstri @@ -159,16 +159,162 @@ .argv%5B3%5D +%0Aexcept IndexError:%0A vis = raw_input(%22MS Name? : %22)%0A field = raw_input(%22Field Name/Number? : %22)%0A corrstring = raw_input(%22Corrstring? : %22) %0A%0Atb.ope
6dbcc892f8b659a22a33fce3836cb082b64dd817
load order
campos_event/__openerp__.py
campos_event/__openerp__.py
# -*- coding: utf-8 -*- ############################################################################## # # This file is part of CampOS Event, # an Odoo module. # # Copyright (c) 2015 Stein & Gabelgaard ApS # http://www.steingabelgaard.dk # Hans Henrik Gaelgaard # # CampOS Event is free software: # you can redistribute it and/or modify it under the terms of the GNU # Affero General Public License as published by the Free Software # Foundation,either version 3 of the License, or (at your option) any # later version. # # CampOS Event is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with CampOS Event. # If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "CampOS Event", 'summary': """ Scout Camp Management Solution""", # 'description': put the module description in README.rst 'author': "Hans Henrik Gabelgaard", 'website': "http://www.steingabelgaard.dk", # Categories can be used to filter modules in modules listing # Check http://goo.gl/0TfwzD for the full list 'category': 'Uncategorized', 'version': '0.1', 'license': 'AGPL-3', # any module necessary for this one to work correctly 'depends': [ 'base', 'mail', 'event', 'website', 'portal', ], # always loaded 'data': [ 'security/campos_event_security.xml', 'security/ir.model.access.csv', 'security/ir.rule.csv', 'data/campos.municipality.csv', 'data/campos.scout.org.csv', 'views/campos_menu.xml', 'views/templates.xml', 'views/participant_view.xml', 'views/committee_view.xml', 'views/municipality_view.xml', "views/scout_org_view.xml", "views/res_partner_view.xml", "views/job_view.xml", "views/job_template.xml", "views/mail_templates.xml", "views/portal_menu.xml", "views/res_users_view.xml", ], # only loaded in demonstration mode 'demo': [ 'demo.xml', ], }
Python
0.000001
@@ -1972,32 +1972,8 @@ -'views/campos_menu.xml', %0A @@ -2352,24 +2352,57 @@ _view.xml%22,%0A + 'views/campos_menu.xml',%0A %5D,%0A #
c48e825e768c1ff0c4eab9ce1f623c81b5856cd1
Decrease Exploration Rate was True in the Grid search.
Dyna_Grid_Search.py
Dyna_Grid_Search.py
''' This is a GRID search to find the best parameters of an algorithm. In future developments it will be parallelized. In the Mountain Car Problem, for Sarsa with Tile Coding, we have the parameters from Sutton and Barto Book alpha = 0.5 (then divided by num tilings, so it becomes 0.5/0.8, check the implementation of the agents) decaying factor = 0.96 lambda = 0.96 Discretization = 8,8 ''' import gym from gym import envs from agents import TabularSarsaAgent from agents import ApproximatedSarsaLambdaAgent from agents import HAApproximatedSarsaLambdaAgent from agents import StaticHeuristicApproximatedSarsaLambdaAgent from static_heuristics.EuclideanHeuristic import EuclideanHeuristic from model.dyna_model import dyna_model import numpy as np import matplotlib.pyplot as plt from gym_maze.envs.maze_env import * import time from model import m3d_model import pickle print(envs.registry.all()) env = gym.make("dyna-v0") env.seed(7) env._max_episode_steps = 5000 repetitions = 25 episodes = 50 env.reset() obs_mins = env.observation_space.low obs_maxs = env.observation_space.high #[env.observation_space[0].max_value, env.observation_space[1].max_value] print obs_mins print obs_maxs discretizations = [20,20] #position num_tilings = 10 total_result = [] rend = False # render or not. #values for Rho rho_pos = [0.1,0.3,0.6,0.9,0.99] # [0.1,0.5,0.99] #3 #values for psi, for the heuristic psi_pos = [0.001, 0.01,0.1,0.3,0.5] # [0.00001, 0.0001,0.001,0.01,0.1,1] # 5 #values of nu, for the heuristic nu_pos = [1,5,10] #values for discount factor discount_pos = [0.99] # not discounted lmbd = [0.9]# Lambda get the same value... alpha_pos = [0.1] #it becomes 0.5/8, given the num tilings above eps_pos = [0.1] #decaying exploration # one iteration of the grid search algorithms = ["NOH","SH","H"] Strategies = ["Replacing","TrueOnline"] algo = algorithms[1] strat = Strategies[1] hard_soft = "hard" model_based = False if algo == "SH": rho_pos = [0.0] z= 0 #counter for eps in eps_pos: for rho in rho_pos: for psi in psi_pos: for dis in discount_pos: for nu in nu_pos: for alpha in alpha_pos: config = { "Strategy" : strat, "Pheromone_strategy": hard_soft, "decrease_exploration" : True, #Mountain Car has a decaying eploration "learning_rate" : alpha, "psi": psi, "rho": rho, "static_heuristic": EuclideanHeuristic(model= dyna_model(),goal=[25,25],actions_number=4), "model" : m3d_model.m3d_model(), "model_based":model_based, "eps": eps, "nu":nu, # Epsilon in epsilon greedy policies "lambda":lmbd[0], "discount": dis, "n_iter": env._max_episode_steps} times = np.zeros(episodes) results = np.zeros(episodes) print z for j in range(repetitions): # this is to decide for the parameter if algo=="NOH": ag = ApproximatedSarsaLambdaAgent.ApproximatedSarsaLambdaAgent(obs_mins,obs_maxs,env.action_space,discretizations,[num_tilings], my_config=config) elif algo=="SH": ag = StaticHeuristicApproximatedSarsaLambdaAgent.StaticHeuristicApproximatedSarsaLambdaAgent(obs_mins,obs_maxs,env.action_space,discretizations,[num_tilings], my_config=config) else: ag = HAApproximatedSarsaLambdaAgent.HAApproximatedSarsaLambdaAgent(obs_mins,obs_maxs,env.action_space,discretizations,[num_tilings], my_config=config) for i in range(episodes): tb = time.time() ag.learn(env,rend) te = time.time() tdiff= te-tb res= ag.return_last_steps() results[i] = results[i]+res[i] print res[i] times[i] = times[i] + tdiff print i #print (res[-1], [eps,rho,psi,dis,dis,alpha]) #in the maze grid search you are looking for the one with the smallest cumulative_sum total_result.append({"parameters": [eps,rho,psi,dis,lmbd,alpha,nu] , "times":times/repetitions, "20thep": results[-1]/repetitions, "results":results/repetitions, "cumulative_sum": np.sum(results/repetitions)}) # env.step(env.action_space.sample()) # take a random action z = z+1 with open("Dyna_"+algo+"_" + strat + "_" + hard_soft + "model"+str(model_based)+ ".pkl", 'wb') as f: pickle.dump(total_result, f) #Saving the result of the GRID Search
Python
0.000001
@@ -2486,19 +2486,20 @@ tion%22 : -Tru +Fals e, #Moun @@ -2818,76 +2818,8 @@ ),%0D%0A - %22model%22 : m3d_model.m3d_model(),%0D%0A @@ -3495,32 +3495,46 @@ +#env.seed(7)%0D%0A %0D%0A @@ -4830,16 +4830,52 @@ tdiff%0D%0A + %0D%0A
1908abeb6aa0082fb49428185c340a7231cdd467
fix typo in binaryoperator
emu/processes/wps_binaryoperator.py
emu/processes/wps_binaryoperator.py
from pywps import Process, LiteralInput, LiteralOutput from pywps.app.Common import Metadata import logging logger = logging.getLogger("PYWPS") class BinaryOperator(Process): def __init__(self): inputs = [ LiteralInput('inputa', 'Input 1', data_type='float', abstract='Enter Input 1', default="2.0"), LiteralInput('inputb', 'Input 2', data_type='float', abstract='Enter Input 2', default="3.0"), LiteralInput('operator', 'Operator', data_type='string', abstract='Choose a binary Operator', default='add', allowed_values=['add', 'substract', 'divide', 'multipy'])] outputs = [ LiteralOutput('output', 'Binary operator result', data_type='float')] super(BinaryOperator, self).__init__( self._handler, identifier='binaryoperatorfornumbers', title='Binary Operator for Numbers', abstract='Performs operation on two numbers and returns the answer.\ This example process is taken from Climate4Impact.', metadata=[ Metadata('Birdhouse', 'http://bird-house.github.io/'), Metadata('User Guide', 'http://emu.readthedocs.io/en/latest/'), Metadata('Climate4Impact', 'https://dev.climate4impact.eu')], version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) @staticmethod def _handler(request, response): logger.info("run binary_operator") operator = request.inputs['operator'][0].data input_a = request.inputs['inputa'][0].data input_b = request.inputs['inputb'][0].data if operator == 'substract': response.outputs['output'].data = input_a - input_b elif operator == 'multiply': response.outputs['output'].data = input_a * input_b elif operator == 'divide': response.outputs['output'].data = input_a / input_b else: response.outputs['output'].data = input_a + input_b return response
Python
0.0171
@@ -777,16 +777,17 @@ 'multip +l y'%5D)%5D%0A
ebea01ea751a196fb71a1a7e1f2664e893fa5386
Fix JavaScript presubmit checks to allow for *[...] syntax.
catapult_build/js_checks.py
catapult_build/js_checks.py
# Copyright (c) 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import re import sys import warnings from py_vulcanize import strip_js_comments from catapult_build import parse_html class JSChecker(object): def __init__(self, input_api, output_api, file_filter=None): self.input_api = input_api self.output_api = output_api if file_filter: self.file_filter = file_filter else: self.file_filter = lambda x: True def RegexCheck(self, line_number, line, regex, message): """Searches for |regex| in |line| to check for a style violation. The |regex| must have exactly one capturing group so that the relevant part of |line| can be highlighted. If more groups are needed, use "(?:...)" to make a non-capturing group. Sample message: Returns a message like the one below if the regex matches. line 6: Use var instead of const. const foo = bar(); ^^^^^ """ match = re.search(regex, line) if match: assert len(match.groups()) == 1 start = match.start(1) length = match.end(1) - start return ' line %d: %s\n%s\n%s' % ( line_number, message, line, _ErrorHighlight(start, length)) return '' def ConstCheck(self, i, line): """Checks for use of the 'const' keyword.""" if re.search(r'\*\s+@const', line): # Probably a JsDoc line. return '' return self.RegexCheck( i, line, r'(?:^|\s|\()(const)\s', 'Use var instead of const.') def RunChecks(self): """Checks for violations of the Chromium JavaScript style guide. See: http://chromium.org/developers/web-development-style-guide#TOC-JavaScript """ old_path = sys.path old_filters = warnings.filters try: base_path = os.path.abspath(os.path.join( os.path.dirname(__file__), '..')) closure_linter_path = os.path.join( base_path, 'third_party', 'closure_linter') gflags_path = os.path.join( base_path, 'third_party', 'python_gflags') sys.path.insert(0, closure_linter_path) sys.path.insert(0, gflags_path) warnings.filterwarnings('ignore', category=DeprecationWarning) from closure_linter import runner, errors from closure_linter.common import errorhandler finally: sys.path = old_path warnings.filters = old_filters class ErrorHandlerImpl(errorhandler.ErrorHandler): """Filters out errors that don't apply to Chromium JavaScript code.""" def __init__(self): super(ErrorHandlerImpl, self).__init__() self._errors = [] self._filename = None def HandleFile(self, filename, _): self._filename = filename def HandleError(self, error): if self._Valid(error): error.filename = self._filename self._errors.append(error) def GetErrors(self): return self._errors def HasErrors(self): return bool(self._errors) def _Valid(self, error): """Checks whether an error is valid. Most errors are valid, with a few exceptions which are listed here. """ if re.search('</?(include|if)', error.token.line): return False # GRIT statement. if (error.code == errors.MISSING_SEMICOLON and error.token.string == 'of'): return False # ES6 for...of statement. return error.code not in [ errors.JSDOC_ILLEGAL_QUESTION_WITH_PIPE, errors.MISSING_JSDOC_TAG_THIS, errors.MISSING_MEMBER_DOCUMENTATION, ] results = [] affected_files = self.input_api.AffectedFiles( file_filter=self.file_filter, include_deletes=False) def ShouldCheck(f): if f.LocalPath().endswith('.js'): return True if f.LocalPath().endswith('.html'): return True return False affected_js_files = filter(ShouldCheck, affected_files) for f in affected_js_files: error_lines = [] contents = list(f.NewContents()) error_lines += CheckStrictMode( '\n'.join(contents), is_html_file=f.LocalPath().endswith('.html')) for i, line in enumerate(contents, start=1): error_lines += filter(None, [self.ConstCheck(i, line)]) # Use closure_linter to check for several different errors. import gflags as flags flags.FLAGS.strict = True error_handler = ErrorHandlerImpl() runner.Run(f.AbsoluteLocalPath(), error_handler) for error in error_handler.GetErrors(): highlight = _ErrorHighlight( error.token.start_index, error.token.length) error_msg = ' line %d: E%04d: %s\n%s\n%s' % ( error.token.line_number, error.code, error.message, error.token.line.rstrip(), highlight) error_lines.append(error_msg) if error_lines: error_lines = [ 'Found JavaScript style violations in %s:' % f.LocalPath()] + error_lines results.append( _MakeErrorOrWarning(self.output_api, '\n'.join(error_lines))) return results def _ErrorHighlight(start, length): """Produces a row of '^'s to underline part of a string.""" return start * ' ' + length * '^' def _MakeErrorOrWarning(output_api, error_text): return output_api.PresubmitError(error_text) def CheckStrictMode(contents, is_html_file=False): statements_to_check = [] if is_html_file: statements_to_check.extend(_FirstStatementsInScriptElements(contents)) else: statements_to_check.append(_FirstStatement(contents)) error_lines = [] for s in statements_to_check: if s != "'use strict'": error_lines.append('Expected "\'use strict\'" as first statement, ' 'but found "%s" instead.' % s) return error_lines def _FirstStatementsInScriptElements(contents): """Returns a list of first statements found in each <script> element.""" soup = parse_html.BeautifulSoup(contents) script_elements = soup.find_all('script', src=None) return [_FirstStatement(e.get_text()) for e in script_elements] def _FirstStatement(contents): """Extracts the first statement in some JS source code.""" stripped_contents = strip_js_comments.StripJSComments(contents).strip() matches = re.match('^(.*?);', stripped_contents, re.DOTALL) if not matches: return '' return matches.group(1).strip() def RunChecks(input_api, output_api, excluded_paths=None): def ShouldCheck(affected_file): if not excluded_paths: return True path = affected_file.LocalPath() return not any(re.match(pattern, path) for pattern in excluded_paths) return JSChecker(input_api, output_api, file_filter=ShouldCheck).RunChecks()
Python
0.000269
@@ -3505,32 +3505,308 @@ .of statement.%0A%0A + if (error.code == errors.LINE_STARTS_WITH_OPERATOR and%0A error.token.string == '*'):%0A return False # *%5B...%5D syntax%0A%0A if (error.code == errors.MISSING_SPACE and%0A error.token.string == '%5B'):%0A return False # *%5B...%5D syntax%0A%0A return e
ae0b0b7281fd301b00a9fde12a3b208c7dbfcab0
Fix bug caused by recycleing a variable
cats4all/cats_downloader.py
cats4all/cats_downloader.py
import os import time import imgurpython import requests import sqlite3 import itertools from collections import namedtuple import json DIR_NAME_FRMT = 'cats-%s' tag = 'cat' DB_FILE_NAME = 'cats2.db' ImageData = namedtuple('ImageData', ['id', 'link', 'title', 'height', 'width']) ImgurConfig = namedtuple('ImgurConfig', ['id', 'secret']) def get_config(config_path='./config.json'): with open(config_path, 'r') as config_file: config = json.load(config_file) # Perhaps: return ImgurConfig(**config) return ImgurConfig(config['id'], config['secret']) def init_db(table_name='cats'): db_connection = sqlite3.connect(DB_FILE_NAME) cursor = db_connection.cursor() cursor.execute('create table %s(image_id, date)' % (table_name,)) db_connection.commit() cursor.close() db_connection.close() def get_all_from__db(table_name='cats', db_file_name=DB_FILE_NAME): db_connection = sqlite3.connect(db_file_name) cursor = db_connection.cursor() cursor.execute('select * from %s' % (table_name,)) db_connection.commit() cursor.close() db_connection.close() def add_to_db(image_id, date, table_name='cats'): db_connection = sqlite3.connect(DB_FILE_NAME) cursor = db_connection.cursor() cursor.execute('insert into %s values (?, ?)' % (table_name,), (image_id, date)) db_connection.commit() db_connection.close() def add_bulk_to_db(image_iter, table_name='cats'): db_connection = sqlite3.connect(DB_FILE_NAME) cursor = db_connection.cursor() cursor.execute('insert into %s values (?, ?)' % (table_name,), image_iter) db_connection.commit() db_connection.close() def does_image_exist(image_id, table_name='cats'): db_connection = sqlite3.connect(DB_FILE_NAME) try: cursor = db_connection.cursor() cursor.execute('select * from %s where image_id=?' % (table_name,), (image_id,)) rows = cursor.fetchall() cursor.close() return len(rows) >= 1 finally: db_connection.close() def predicate(image): if image.height < 500 or image.width < 300: return False if image.height > 1200: return False return True def get_todays_dir(dir_frmt=DIR_NAME_FRMT): return dir_frmt % (time.strftime('%Y-%m-%d')) def remove_existing(images_data): nonexisiting_images = [] for i in images_data: if not does_image_exist(i.id): nonexisiting_images.append(i) return nonexisiting_images def flatten_items(items, client): for item in items: if item.is_album: album_images = client.get_album_images(item.id) for album_image in album_images: yield ImageData(album_image.id, album_image.link, item.title, album_image.height, album_image.width) elif type(item) is imgurpython.imgur.models.gallery_image.GalleryImage: yield ImageData(item.id, item.link, item.title, item.height, item.width) def get_images_data_by_tag(imgur_config, tag, num=150, sort='viral'): client = imgurpython.ImgurClient(imgur_config.id, imgur_config.secret) images_by_tag = client.gallery_tag(tag, sort=sort) images_data = (i for i in flatten_items(images_by_tag.items, client) if predicate(i)) return itertools.islice(images_data, num) def get_images_of_tag(imgur_config, tag, num=150, sort='viral'): current_page = 1 continue_download = True count_images = 0 while continue_download: images_data = get_images_data_by_tag(imgur_config, tag, num, sort) images_data = remove_existing(images_data) print len(images_data) count_images += len(images_data) curr_date = time.strftime('%Y-%m-%d') for i in images_data: file_name = '%s\\%s.jpg'%(get_todays_dir(), i.id) try: print i.title except UnicodeEncodeError as e: print 'Could not print image name. ID ' + i.id with open(file_name,'wb') as f: f.write(requests.get(i.link).content) add_to_db(i.id, curr_date) current_page += 1 if len(images_data) == 0 or count_images >= num: continue_download = False def main(): if not os.path.isfile(DB_FILE_NAME): init_db() cats_dir = get_todays_dir() if not os.path.isdir(cats_dir): os.makedirs(cats_dir) imgur_config = get_config() get_images_of_tag(imgur_config, tag, sort='time') if __name__ == '__main__': main()
Python
0.000001
@@ -129,16 +129,27 @@ rt json%0A +import pdb%0A %0A%0ADIR_NA @@ -2073,16 +2073,32 @@ image):%0A + return True%0A if i @@ -2355,16 +2355,56 @@ _data):%0A + print str(len(images_data)) + '!!!'%0A none @@ -3252,17 +3252,17 @@ _data = -( +%5B i for i @@ -3322,21 +3322,23 @@ icate(i) -) +%5D %0A + # return @@ -3372,16 +3372,101 @@ a, num)%0A + print len(images_data), '???'%0A # pdb.set_trace()%0A return images_data%5B:num%5D%0A %0A%0Adef ge @@ -3700,32 +3700,36 @@ sort)%0A%0A +new_ images_data = re @@ -3765,32 +3765,36 @@ print len( +new_ images_data)%0A @@ -3814,24 +3814,28 @@ ages += len( +new_ images_data) @@ -3891,32 +3891,36 @@ for i in +new_ images_data:%0A @@ -4295,24 +4295,24 @@ t_page += 1%0A - if l @@ -4310,24 +4310,28 @@ if len( +new_ images_data)
9911604243a1fb3612317fda91a61653ae396e20
Fix script permissions
Source/Documentation/Runme.py
Source/Documentation/Runme.py
#!/usr/bin/python import os import sys import shutil import subprocess def create_page(orig_path, page_name, page_header): orig = open(orig_path) dest = open("Temp/" + os.path.split(orig_path)[1] + ".txt", "w") dest.write("/** @page " + page_name + " " + page_header + "\n") dest.write(orig.read()) dest.write("\n*/") orig.close() dest.close() beforeDir = os.getcwd() scriptDir = os.path.dirname(sys.argv[0]) os.chdir(scriptDir) # create Legal page if os.path.isdir("Temp"): shutil.rmtree("Temp") os.mkdir("Temp") create_page("../../NOTICE", "legal", "Legal Stuff & Acknowledgments") create_page("../../ReleaseNotes.txt", "release_notes", "Release Notes") errfile = "Temp/doxy_error" subprocess.check_call(["doxygen", "Doxyfile"], stdout=open(os.devnull,"w"), stderr=open(errfile,"w")) os.chdir(beforeDir)
Python
0.000001
00c07a76185aff4873abe3eb814b847b6d02f58f
fix date
02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py
02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py
#!/usr/bin/python3.6 """Jerod Gawne, 2017-09-12 Find the Second Largest Number https://www.hackerrank.com/challenges/find-second-maximum-number-in-a-list/ Editorial: - There are many ways to solve this problem. This can be solved by maintaining two variables max and second_max. Iterate through the list and find the maximum and store it. Iterate again and find the next maximum value by having an if condition that checks if it's not equal to first maximum. Create a counter from the given array. Extract the keys, sort them and print the second last element. Transform the list to a set and then list again, removing all the duplicates. Then sort the list and print the second last element. """ def main(): """ Main/Tests """ input() print(sorted(set(map(int, input().split())))[-2]) if __name__ == '__main__': try: main() except Exception: import sys import traceback print(traceback.print_exception(*sys.exc_info()))
Python
0.00188
@@ -42,10 +42,10 @@ -09- -1 2 +8 %0A%0AFi
7d92018ee8952998ef8d19ec342c4a898c633687
Fix character matching
Firmware/sampler.py
Firmware/sampler.py
#!/usr/bin/python2 # run openocd (0.9.0) with : # $ openocd -f interface/stlink-v2.cfg -f target/stm32f4x.cfg &> /dev/null & # then run # $ python2 sampler.py path_to_myelf_with_symbols # ctrl-c to stop sampling. # To terminate the openocd session, enter command "fg" then do ctrl-c. import sys import time import telnetlib import subprocess from bisect import bisect_right import operator class OpenOCDCMSampler(object): def __init__(self, host='localhost', port=4444): self.net = telnetlib.Telnet(host, port) self.net.read_very_eager() self.table = [] self.indexes = set() def __del__(self): self.net.write(b'exit\r\n') self.net.read_until(b'exit\r\n', 1) self.net.close() def getpc(self): self.net.write(b'mrw 0xE000101C\r\n') res = self.net.read_until(b'\r\n\r> ', 1) if res: prefix = res[0:16] num = res[16:-5] res = res[-15:0] if prefix == b'mrw 0xE000101C\r\n': return int(num) return 0 def initSymbols(self, elf, symbol_dump_cmd='arm-none-eabi-nm'): proc = subprocess.Popen([symbol_dump_cmd, '-CS', '--size-sort', elf], stdout=subprocess.PIPE) for line in proc.stdout.readlines(): field = line.split() try: # For using nm -CS if field[2] in ('t', 'T', 'w', 'W'): addr = int(field[0], 16) func = field[3] size = int(field[1], 16) # # For using readelf -s # if field[3] == b'FUNC': # addr = int(field[1], 16) - 1 # For some reason readelf dumps the func addr off by 1 # func = field[7] # size = int(field[2]) if addr not in self.indexes: self.table.append((addr, func, size)) self.indexes.add(addr) except IndexError: pass self.table.sort() self.addrs = [ x for (x, y, z) in self.table ] def func(self, pc): if pc == 0 or pc == 0xFFFFFFFF: return ('', 0) i = bisect_right(self.addrs, pc) if i: addr, symb, size = self.table[i-1] if pc >= addr and pc <= addr + size: return (symb, addr) return ('', 0) if __name__ == '__main__': sampler = OpenOCDCMSampler('localhost', 4444) sampler.initSymbols(sys.argv[1]) total = 0 countmap = { } pcmap = { } funcmap = { } start = time.time() try: while True: pc = sampler.getpc() if pc in pcmap: pcmap[pc] += 1 else: pcmap[pc] = 1 func, addr = sampler.func(pc) if(func == 'ADC_IRQ_Dispatch'): funcmap[pc] = 1 if not addr: continue if func in countmap: countmap[func] += 1 total += 1 else: countmap[func] = 1 total += 1 cur = time.time() if cur - start > 5.0: # tmp = sorted(funcmap) # for k in tmp: # print(hex(k)) tmp = sorted(countmap.items(), key=operator.itemgetter(1)) #, reverse=True) for k, v in tmp: print('{:05.2f}% {}'.format((v * 100.) / total, k)) # print('{:06.2f} clocks : {}'.format((v * 10500) / total, k)) start = cur print('{} Samples'.format(total)) print('') # total = 0 # countmap = { } # pcmap = { } except KeyboardInterrupt: pcmap = sorted(pcmap.items(), key=operator.itemgetter(1), reverse=True) pcmap = [(hex(addr), count) for addr, count in pcmap]
Python
0.000004
@@ -1408,23 +1408,27 @@ in ( +b 't', +b 'T', +b 'w', +b 'W')
0c305e8bd8624af2d2ce1ca292bb26095d0e1dcc
Fix __eq__ method on ExecuteContainer to compare only proper classes
cekit/descriptor/execute.py
cekit/descriptor/execute.py
import yaml import cekit from cekit.descriptor import Descriptor execute_schemas = [yaml.safe_load(""" map: name: {type: str} script: {type: str} user: {type: text}""")] container_schemas = [yaml.safe_load(""" seq: - {type: any}""")] class Execute(Descriptor): def __init__(self, descriptor, module_name): self.schemas = execute_schemas super(Execute, self).__init__(descriptor) descriptor['directory'] = module_name if 'user' not in descriptor: descriptor['user'] = cekit.DEFAULT_USER descriptor['module_name'] = module_name if 'name' not in descriptor: descriptor['name'] = "%s/%s" % (module_name, descriptor['script']) class ExecuteContainer(Descriptor): """Container holding Execute classes. I't responsible for correct Execute Class merging and ordering""" def __init__(self, descriptor, module_name): self.schemas = container_schemas super(ExecuteContainer, self).__init__(descriptor) self.name = module_name if not descriptor: descriptor = [{'name': 'noop'}] self._descriptor = [Execute(x, module_name) for x in descriptor] def _get_real_executes(self): return [x for x in self._descriptor if x['name'] != 'noop'] def __len__(self): return len(self._get_real_executes()) def __iter__(self): return iter(self._get_real_executes()) def merge(self, descriptor): """To merge modules in correct order we need to insert new executes before the last module. This the raeson why noop execut exists""" prev_module = self._descriptor[-1]['module_name'] pos = 0 for executes in self._descriptor: if executes['module_name'] == prev_module: continue pos += 1 for executes in reversed(list(descriptor)): if executes not in self._descriptor: self._descriptor.insert(pos, executes) def __eq__(self, other): for i, execute in enumerate(self._descriptor): if execute != other[i]: return False return True
Python
0
@@ -2117,24 +2117,99 @@ lf, other):%0A + if not isinstance(other, self.__class__):%0A return False%0A for
b0d88fccf51f240800fda462bddb9d934931c078
Update tests.py
intermediate_words_search_python/tests.py
intermediate_words_search_python/tests.py
# imports from solution from main import solution from utils import show_path def tests(): for search_method in ('BFS', 'A*'): opts = { 'search_method': search_method } assert solution('cat', 'dog', opts) == ('cat', 'cot', 'dot', 'dog') assert solution('cat', 'dot', opts) == ('cat', 'cot', 'dot') assert solution('cat', 'cot', opts) == ('cat', 'cot') assert solution('cat', 'cat', opts) == ('cat', ) assert solution('fan', 'for', opts) == ('fan', 'fin', 'fir', 'for') assert solution('place', 'places', opts) == ('place', 'places') assert solution('duck', 'dusty', opts) == ('duck', 'dusk', 'dust', 'dusty') assert solution('duck', 'ducked', opts) is None assert solution('rue', 'be', opts) == ('rue', 'run', 'runt', 'bunt', 'bent', 'beet', 'bee', 'be') assert solution('rue', 'defuse', opts) == ('rue', 'ruse', 'reuse', 'refuse', 'defuse') not_a_word_1 = 'NotAWord' assert solution('rue', not_a_word_1, opts) is None not_a_word_2 = 'plar' assert solution(not_a_word_2, 'play', opts) == (not_a_word_2, 'play') not_a_word_3 = 'blah' assert solution(not_a_word_3, 'defuse', opts) is None return 'tests pass!' if __name__ == '__main__': print tests()
Python
0.000001
@@ -39,24 +39,42 @@ ort solution +, LOCAL_DICTIONARY %0Afrom utils @@ -90,16 +90,1041 @@ ow_path%0A +from time import time%0A%0A%0ATEST_CASES = (%0A # start word, target word, minimal path length%0A ( 'cat', 'dog', 4 ),%0A ( 'cat', 'mistrial', 9 ),%0A ( 'strong', 'weak', 7 ),%0A ( 'hot', 'cold', 4 ),%0A ( 'up', 'down', 5 ),%0A ( 'left', 'right', 7 ),%0A ( 'light', 'heavy', 10 ),%0A ( 'computer', 'virus', 12 ),%0A ( 'strike', 'freeze', 6 ),%0A ( 'fan', 'for', 3 ),%0A ( 'duck', 'dusty', 4 ),%0A ( 'rue', 'be', 3 ),%0A ( 'rue', 'defuse', 5 ),%0A ( 'rue', 'bend', 5 ),%0A ( 'zoologist', 'zoology', None ) # no path; these two words are disjoint%0A)%0A%0A%0Adef tests2():%0A t0 = time()%0A opts = %7B 'search_method': 'A*', 'dictionary_filename': LOCAL_DICTIONARY %7D%0A%0A for start_word,target_word,path_len in TEST_CASES:%0A path = solution(start_word, target_word, opts)%0A assert (len(path) if path else None) == path_len%0A%0A return 'tests pass in %7B%7D seconds!'.format(time() - t0)%0A%0A %0A%0Adef te @@ -2332,16 +2332,18 @@ __':%0A + # print t @@ -2349,8 +2349,27 @@ tests()%0A + print tests2()%0A
446d578ec9765a22478abecc7df526f666f5e57c
remove 'devel' in filename and add 'runtime'
cerbero/packages/android.py
cerbero/packages/android.py
# cerbero - a multi-platform build system for Open Source software # Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Library General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Library General Public License for more details. # # You should have received a copy of the GNU Library General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. import os import tarfile import zipfile from cerbero.packages.disttarball import DistTarball from cerbero.errors import UsageError class AndroidPackager(DistTarball): ''' Creates a distribution tarball for Android ''' def __init__(self, config, package, store): DistTarball.__init__(self, config, package, store) def _create_tarball(self, output_dir, package_type, files, force, package_prefix): filenames = [] # Create the bz2 file first filename = os.path.join(output_dir, self._get_name(package_type)) if os.path.exists(filename): if force: os.remove(filename) else: raise UsageError("File %s already exists" % filename) tar = tarfile.open(filename, "w:bz2") for f in files: filepath = os.path.join(self.prefix, f) tar.add(filepath, os.path.join(package_prefix, f)) tar.close() filenames.append(filename) # Create the zip file for windows filename = os.path.join(output_dir, self._get_name(package_type, ext='zip')) if os.path.exists(filename): if force: os.remove(filename) else: raise UsageError("File %s already exists" % filename) zipf = zipfile.ZipFile(filename, 'w') for f in files: filepath = os.path.join(self.prefix, f) zipf.write(filepath, os.path.join(package_prefix, f), compress_type=zipfile.ZIP_DEFLATED) zipf.close() filenames.append(filename) return ' '.join(filenames) def register(): from cerbero.packages.packager import register_packager from cerbero.config import Distro register_packager(Distro.ANDROID, AndroidPackager)
Python
0
@@ -897,16 +897,57 @@ ipfile%0A%0A +from cerbero.packages import PackageType%0A from cer @@ -2563,16 +2563,442 @@ names)%0A%0A + def _get_name(self, package_type, ext='tar.bz2'):%0A if package_type == PackageType.DEVEL:%0A package_type = ''%0A elif package_type == PackageType.RUNTIME:%0A package_type = '-runtime'%0A%0A return %22%25s%25s-%25s-%25s-%25s%25s.%25s%22 %25 (self.package_prefix, self.package.name,%0A self.config.target_platform, self.config.target_arch,%0A self.package.version, package_type, ext)%0A%0A %0Adef reg
22f7c82d63fb7ccc9226d734bdacd4a47e96da89
Fix the `clear_app` command for Django 1.8
ixdjango/management/commands/clear_app.py
ixdjango/management/commands/clear_app.py
""" Management command to clear specified app's models of data. .. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au> """ from __future__ import print_function from django.core.management.base import BaseCommand from django.core.management.color import no_style from django.db import connection, transaction # pylint:disable=no-name-in-module from django.db.models import get_app, get_model, get_models # pylint:disable=protected-access real_print = print # pylint:disable=invalid-name def print(*args, **kwargs): # pylint:disable=redefined-builtin """ Only print if required """ if kwargs.pop('verbosity') >= 1: real_print(*args, **kwargs) class Command(BaseCommand): """ A command to clear app data. """ help = ('Cleans the specified applications\' tables to a pristine state.') def handle(self, *targets, **options): verbosity = int(options['verbosity']) models = [] for target in targets: target = target.split('.') try: app, = target model = None except ValueError: app, model = target if model: models.append(get_model(app, model)) else: app_models = [ model for model in get_models(get_app(app), include_auto_created=True) if model._meta.managed ] models += app_models print("Found %d model(s) for %s" % (len(app_models), app), verbosity=verbosity) with transaction.atomic(): for model in models: print("Clearing %s table %s" % ( model, model._meta.db_table), verbosity=verbosity) cursor = connection.cursor() cursor.execute('TRUNCATE TABLE {} CASCADE'.format( model._meta.db_table)) sql = connection.ops.sequence_reset_sql(no_style(), [model]) for cmd in sql: connection.cursor().execute(cmd) print("Cleared %d models" % len(models), verbosity=verbosity)
Python
0.00069
@@ -854,16 +854,58 @@ state.') +%0A args = '%3Capp_label%3E %3Capp_label%3E ... ' %0A%0A de
6c4219e92611ed0f6721e86ca3d24a28c30825c2
add 15 min total
cgi-bin/precip/snetRates.py
cgi-bin/precip/snetRates.py
#!/mesonet/python/bin/python import mx.DateTime, cgi, sys from pyIEM import iemdb i = iemdb.iemdb() mydb = i['snet'] def diff(nowVal, pastVal, mulli): if (nowVal < 0 or pastVal < 0): return "%5s," % ("M") differ = nowVal - pastVal if differ < 0: return "%5s," % ("M") return "%5.2f," % (differ * mulli) def Main(): form = cgi.FormContent() year = form["year"][0] month = form["month"][0] day = form["day"][0] station = form["station"][0][:5] s = mx.DateTime.DateTime(int(year), int(month), int(day)) e = s + mx.DateTime.RelativeDateTime(days=+1) interval = mx.DateTime.RelativeDateTime(minutes=+1) print 'Content-type: text/plain\n\n' print "SID , DATE ,TIME ,PCOUNT,60min ,30min ,20min ,15min ,10min , 5min , 1min ," rs = mydb.query("SELECT station, valid, pday from t%s WHERE \ station = '%s' and date(valid) = '%s' ORDER by valid ASC" \ % (s.strftime("%Y_%m"), station, s.strftime("%Y-%m-%d") ) ).dictresult() pcpn = [-1]*(24*60) if (len(rs) == 0): print 'NO RESULTS FOUND FOR THIS DATE!' sys.exit(0) lminutes = 0 lval = 0 for i in range(len(rs)): ts = mx.DateTime.strptime(rs[i]['valid'][:16], "%Y-%m-%d %H:%M") minutes = int((ts - s).minutes) val = float(rs[i]['pday']) pcpn[minutes] = val if ((val - lval) < 0.02): for b in range(lminutes, minutes): pcpn[b] = val lminutes = minutes lval = val for i in range(len(pcpn)): ts = s + (interval * i) print "%s,%s," % (rs[0]['station'], ts.strftime("%Y-%m-%d,%H:%M") ), if (pcpn[i] < 0): print "%5s," % ("M"), else: print "%5.2f," % (pcpn[i],), if (i >= 60): print diff(pcpn[i], pcpn[i-60], 1), else: print "%5s," % (" "), if (i >= 30): print diff(pcpn[i], pcpn[i-30], 2), else: print "%5s," % (" "), if (i >= 20): print diff(pcpn[i], pcpn[i-20], 3), else: print "%5s," % (" "), if (i >= 15): print diff(pcpn[i], pcpn[i-15], 4), else: print "%5s," % (" "), if (i >= 10): print diff(pcpn[i], pcpn[i-10], 6), else: print "%5s," % (" "), if (i >= 5): print diff(pcpn[i], pcpn[i-5], 12), else: print "%5s," % (" "), if (i >= 1): print diff(pcpn[i], pcpn[i-1], 60), else: print "%5s," % (" "), print Main()
Python
0.000229
@@ -699,16 +699,23 @@ ,PCOUNT, +P15MIN, 60min ,3 @@ -1637,24 +1637,123 @@ pcpn%5Bi%5D,),%0A%0A + if (i %3E= 15):%0A print diff(pcpn%5Bi%5D, pcpn%5Bi-15%5D, 1),%0A else:%0A print %22%255s,%22 %25 (%22 %22),%0A%0A if (i %3E=
32e83559e00b7d5a363585d599cd087af854c445
Support custom initializer in links.CRF1d
chainer/links/loss/crf1d.py
chainer/links/loss/crf1d.py
from chainer.functions.loss import crf1d from chainer import link from chainer import variable class CRF1d(link.Link): """Linear-chain conditional random field loss layer. This link wraps the :func:`~chainer.functions.crf1d` function. It holds a transition cost matrix as a parameter. Args: n_label (int): Number of labels. .. seealso:: :func:`~chainer.functions.crf1d` for more detail. Attributes: cost (~chainer.Variable): Transition cost parameter. """ def __init__(self, n_label): super(CRF1d, self).__init__() with self.init_scope(): self.cost = variable.Parameter(0, (n_label, n_label)) def forward(self, xs, ys, reduce='mean'): return crf1d.crf1d(self.cost, xs, ys, reduce) def argmax(self, xs): """Computes a state that maximizes a joint probability. Args: xs (list of Variable): Input vector for each label. Returns: tuple: A tuple of :class:`~chainer.Variable` representing each log-likelihood and a list representing the argmax path. .. seealso:: See :func:`~chainer.frunctions.crf1d_argmax` for more detail. """ return crf1d.argmax_crf1d(self.cost, xs)
Python
0
@@ -531,16 +531,28 @@ n_label +, initialW=0 ):%0A @@ -663,11 +663,79 @@ ter( -0, +initializer=initialW,%0A shape= (n_l
b0a03411dac3f933e4198d053de74cd7b31fd3e5
update message
chainercv/utils/download.py
chainercv/utils/download.py
from __future__ import division from __future__ import print_function import hashlib import os import shutil import tarfile import tempfile import zipfile import filelock from six.moves.urllib import request import sys import time from chainer.dataset.download import get_dataset_directory from chainer.dataset.download import get_dataset_root def _reporthook(count, block_size, total_size): global start_time if count == 0: start_time = time.time() return duration = time.time() - start_time progress_size = int(count * block_size) try: speed = int(progress_size / (1024 * duration)) except ZeroDivisionError: speed = float('inf') percent = int(count * block_size * 100 / total_size) sys.stdout.write('\r...{}, {} MB, {} KB/s, {} seconds passed'.format( percent, progress_size / (1024 * 1024), speed, duration)) sys.stdout.flush() def cached_download(url): """Downloads a file and caches it. This is different from the original ``cached_download`` in that the download progress is reported. It downloads a file from the URL if there is no corresponding cache. After the download, this function stores a cache to the directory under the dataset root (see :func:`set_dataset_root`). If there is already a cache for the given URL, it just returns the path to the cache without downloading the same file. Args: url (str): URL to download from. Returns: str: Path to the downloaded file. """ cache_root = os.path.join(get_dataset_root(), '_dl_cache') try: os.makedirs(cache_root) except OSError: if not os.path.exists(cache_root): raise lock_path = os.path.join(cache_root, '_dl_lock') urlhash = hashlib.md5(url.encode('utf-8')).hexdigest() cache_path = os.path.join(cache_root, urlhash) with filelock.FileLock(lock_path): if os.path.exists(cache_path): return cache_path print('Fetching the file size from {:s} ...'.format(url)) req = request.Request(url=url, method='HEAD') with request.urlopen(req) as res: total = int(res.getheader('Content-Length')) print('Downloaded file will be saved to {:s} .'.format(cache_path)) print('This will be use {:.2f} MiB of the disk space.'.format( total / (1 << 20))) while True: b = input('Proceed? (y/N): ') if b in {'y', 'Y'}: break elif b in {'', 'n', 'N'}: raise KeyboardInterrupt else: print('Please answer \'y\' or \'n\'.') temp_root = tempfile.mkdtemp(dir=cache_root) try: temp_path = os.path.join(temp_root, 'dl') print('Downloading from {} ...'.format(url)) request.urlretrieve(url, temp_path, _reporthook) with filelock.FileLock(lock_path): shutil.move(temp_path, cache_path) finally: shutil.rmtree(temp_root) return cache_path def download_model(url): """Downloads a model file and puts it under model directory. It downloads a file from the URL and puts it under model directory. For exmaple, if :obj:`url` is `http://example.com/subdir/model.npz`, the pretrained weights file will be saved to `$CHAINER_DATASET_ROOT/pfnet/chainercv/models/model.npz`. If there is already a file at the destination path, it just returns the path without downloading the same file. Args: url (str): URL to download from. Returns: str: Path to the downloaded file. """ root = get_dataset_directory( os.path.join('pfnet', 'chainercv', 'models')) basename = os.path.basename(url) path = os.path.join(root, basename) if not os.path.exists(path): cache_path = cached_download(url) os.rename(cache_path, path) return path def extractall(file_path, destination, ext): """Extracts an archive file. This function extracts an archive file to a destination. Args: file_path (str): The path of a file to be extracted. destination (str): A directory path. The archive file will be extracted under this directory. ext (str): An extension suffix of the archive file. This function supports :obj:`'.zip'`, :obj:`'.tar'`, :obj:`'.gz'` and :obj:`'.tgz'`. """ if ext == '.zip': with zipfile.ZipFile(file_path, 'r') as z: z.extractall(destination) elif ext == '.tar': with tarfile.TarFile(file_path, 'r') as t: t.extractall(destination) elif ext == '.gz' or ext == '.tgz': with tarfile.open(file_path, 'r:gz') as t: t.extractall(destination)
Python
0
@@ -610,20 +610,25 @@ size / ( -1024 +(1 %3C%3C 10) * durat @@ -774,35 +774,63 @@ ite( -'%5Cr...%7B%7D, %7B +%0A '%5Cr... %7B:d%7D %25, %7B:.2f %7D M +i B, %7B +:.2f %7D K +i B/s, %7B +:.1f %7D se @@ -843,32 +843,36 @@ passed'.format(%0A + percent, @@ -894,18 +894,14 @@ / (1 -024 * 1024 + %3C%3C 20 ), s
8214d516b3feba92ab3ad3b1f2fa1cf253e83012
Remove use of deprecated `scan_plugins` method
pyexcel/internal/__init__.py
pyexcel/internal/__init__.py
""" pyexcel.internal ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Pyexcel internals that subjected to change :copyright: (c) 2015-2017 by Onni Software Ltd. :license: New BSD License """ from lml.loader import scan_plugins from pyexcel.internal.plugins import PARSER, RENDERER # noqa from pyexcel.internal.source_plugin import SOURCE # noqa from pyexcel.internal.generators import SheetStream, BookStream # noqa BLACK_LIST = [ "pyexcel_io", "pyexcel_webio", "pyexcel_xlsx", "pyexcel_xls", "pyexcel_ods3", "pyexcel_ods", "pyexcel_odsr", "pyexcel_xlsxw", ] WHITE_LIST = [ "pyexcel.plugins.parsers", "pyexcel.plugins.renderers", "pyexcel.plugins.sources", ] scan_plugins("pyexcel_", "pyexcel", BLACK_LIST, WHITE_LIST)
Python
0.000004
@@ -734,18 +734,25 @@ gins +_regex (%22 +%5E pyexcel_ %22, %22 @@ -747,16 +747,19 @@ pyexcel_ +.+$ %22, %22pyex
e85e63ad0eaf34ebb08805b308bd01f398387492
handle string type input
pypeerassets/crypto/ecdsa.py
pypeerassets/crypto/ecdsa.py
from hashlib import sha256 from binascii import hexlify from random import SystemRandom, randrange class PrivateKey: def __init__(self, privkey=None): self.p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f self.n = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 self.g = (Gx, Gy) if privkey is None: self.privkey = SystemRandom().randrange(1, self.n) self.private_key = '{:0>64x}'.format(self.privkey).encode() else: if isinstance(privkey, bytes): self.privkey = int.from_bytes(privkey, byteorder='big') try: int(privkey.decode(),16) self.private_key = privkey except: self.private_key = hexlify(privkey) self.public_key = self.pubkey() def hash_message(self, message): message_hash = sha256(sha256(message).digest()).digest() e = int.from_bytes(message_hash, 'big') if e.bit_length() > self.n.bit_length(): z = e >> (e.bit_length() - self.n.bit_length()) else: z = e << (self.n.bit_length() - e.bit_length()) assert z.bit_length() <= self.n.bit_length() return z def sign_message(self, message): ''' takes message input as string ''' if isinstance(message, str): message = message.encode() z = self.hash_message(message) r = 0 s = 0 while not r or not s: privkey = randrange(1, self.n) x, y = scalar_mult(privkey, self.g, self.p, self.n) r = x % self.n s = ((z + r * self.privkey) * inverse_mod(privkey, self.n)) % self.n return (r, s) def verify_signature(self, message, signature, pubkey=None): ''' takes message input as string and signature input as tuple ( r, s )''' z = self.hash_message(message.encode()) if pubkey is None: pubkey = self.pubkey(compressed=False) r, s = signature w = inverse_mod(s, self.n) u1 = (z * w) % self.n u2 = (r * w) % self.n x, y = point_add(scalar_mult(u1, self.g,self.p, self.n), scalar_mult(u2, pubkey, self.p, self.n), self.p) if (r % self.n) == (x % self.n): return True else: return False def pubkey(self, compressed=True): x, y = scalar_mult(self.privkey, self.g, self.p, self.n) if not compressed: return (x, y) x = '{:0>64x}'.format(x).encode() if not (y % 2): prefix = b'02' else: prefix = b'03' return prefix + x def make_keypair(self): """Generates a random private-public key pair.""" self.privkey = SystemRandom().randrange(1, self.n) self.public_key = self.pubkey() self.g = self.pubkey(compressed=False) self.private_key = '{:0>64x}'.format(self.privkey).encode() return {"private_key": self.private_key, "public_key": self.public_key} def is_on_curve(point, p): if point is None: return True x, y = point return (y * y - x * x * x - 7) % p == 0 def point_neg(point, p): if point is None: return None x, y = point result = (x, -y % p) return result def point_add(point1, point2, p): if point1 is None: return point2 if point2 is None: return point1 x1, y1 = point1 x2, y2 = point2 if x1 == x2 and y1 != y2: return None if x1 == x2: m = (3 * x1 * x1) * inverse_mod(2 * y1, p) else: m = (y1 - y2) * inverse_mod(x1 - x2, p) x3 = m * m - x1 - x2 y3 = y1 + m * (x3 - x1) result = (x3 % p, -y3 % p) return result def inverse_mod(privkey, p): if privkey == 0: raise ZeroDivisionError('division by zero') if privkey < 0: return p - inverse_mod(-privkey, p) s, old_s = 0, 1 t, old_t = 1, 0 r, old_r = p, privkey while r != 0: quotient = old_r // r old_r, r = r, old_r - quotient * r old_s, s = s, old_s - quotient * s old_t, t = t, old_t - quotient * t gcd, x, y = old_r, old_s, old_t assert gcd == 1 assert (privkey * x) % p == 1 return x % p def scalar_mult(privkey, point, p, n): assert is_on_curve(point, p) if privkey % n == 0 or point is None: return None if privkey < 0: return scalar_mult(-privkey, point_neg(point, p), p, n) result = None addend = point while privkey: if privkey & 1: result = point_add(result, addend, p) addend = point_add(addend, addend, p) privkey >>= 1 assert is_on_curve(result, p) return result
Python
0.000056
@@ -820,62 +820,85 @@ -try:%0A int(privkey.decode(),16)%0A +self.private_key = hexlify(privkey)%0A if isinstance(privkey, str):%0A @@ -922,54 +922,31 @@ priv -ate_ key = +int( privkey -%0A except:%0A +, 16)%0A @@ -976,31 +976,31 @@ e_key = -hexlify(privkey +privkey.encode( )%0A%0A
708e105713d7fd480b4b45d3ef31a46e35e63a8e
annotate calculate_tx_fee.
pypeerassets/transactions.py
pypeerassets/transactions.py
'''transaction assembly/dissasembly''' from time import time from math import ceil from btcpy.structs.address import Address from btcpy.structs.transaction import TxOut, TxIn, Sequence, Locktime, MutableTransaction from btcpy.structs.script import StackData, ScriptSig, NulldataScript, ScriptSig, ScriptPubKey from btcpy.structs.script import P2pkhScript, MultisigScript, P2shScript from .networks import query def calculate_tx_fee(tx_size: int): '''return tx fee from tx size in bytes''' min_fee = 0.01 # minimum return ceil(tx_size / 1000) * min_fee def nulldata_script(data: bytes): '''create nulldata (OP_return) script''' stack = StackData.from_bytes(data) return NulldataScript(stack) def p2pkh_script(address: str): '''create pay-to-key-hash (P2PKH) script''' addr = Address.from_string(address) return P2pkhScript(addr) def tx_output(value: float, seq: int, script: ScriptSig): '''create TxOut object''' return TxOut(int(value * 1000000), seq, script) def make_raw_transaction(inputs: list, outputs: list, locktime=Locktime(0), timestamp: int=int(time()), version=1): '''create raw transaction''' return MutableTransaction(version, timestamp, inputs, outputs, locktime) def find_parent_outputs(provider, utxo: TxIn): '''due to design of the btcpy library, TxIn object must be converted to TxOut object before signing''' index = utxo.txout # utxo index return TxOut.from_json(provider.getrawtransaction(utxo.txid)['vout'][index])
Python
0.000001
@@ -442,16 +442,25 @@ ze: int) + -%3E float :%0A ''
56e4c14ea6e2266bb8fa6f25ef1c0a3b2123f5ad
fix py3k
pystacia/image/_impl/blur.py
pystacia/image/_impl/blur.py
# coding: utf-8 # pystacia/image/_impl/blur.py # Copyright (C) 2011-2012 by Paweł Piotr Przeradowski # This module is part of Pystacia and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php def _make_radius_strength_bias(c_name, names, order=None): def function(image, *args): kwargs = dict(zip(names, args)) if kwargs['strength'] is None: kwargs['strength'] = kwargs['radius'] if 'bias' in kwargs and kwargs['bias'] is None: kwargs['bias'] = 0 order_ = order or names values = [kwargs[k] for k in order_] c_call(image, c_name, *values) return function blur = _make_radius_strength_bias('blur', ['radius', 'strength']) gaussian_blur = _make_radius_strength_bias( 'gaussian_blur', ['radius', 'strength', 'bias']) motion_blur = _make_radius_strength_bias( 'motion_blur', ['radius', 'angle', 'strength', 'bias'], ['radius', 'strength', 'angle', 'bias']) adaptive_blur = _make_radius_strength_bias( 'adaptive_blur', ['radius', 'strength', 'bias']) sharpen = _make_radius_strength_bias( 'sharpen', ['radius', 'strength', 'bias']) adaptive_sharpen = _make_radius_strength_bias( 'adaptive_sharpen', ['radius', 'strength', 'bias']) detect_edges = _make_radius_strength_bias('edge', ['radius', 'strength']) #TODO: moving center here def radial_blur(image, angle): """Performs radial blur. :param angle: Blur angle in degrees :type angle: ``float`` Radial blurs image within given angle. This method can be chained. """ c_call(image, 'radial_blur', angle) def denoise(image): """Attempt to remove noise preserving edges. Applies a digital filter that improves the quality of a noisy image. This method can be chained. """ c_call(image, 'enhance') def despeckle(image): """Attempt to remove speckle preserving edges. Resulting image almost solid color areas are smoothed preserving edges. This method can be chained. """ c_call(image, 'despeckle') emboss = _make_radius_strength_bias('emboss', ['radius', 'strength']) from pystacia.api.func import c_call
Python
0.000002
@@ -224,16 +224,50 @@ e.php%0A%0A%0A +from future_builtins import zip%0A%0A%0A def _mak
27273335422781dcee950ee081ed13e53816e6d6
Bump version
pytablewriter/__version__.py
pytablewriter/__version__.py
__author__ = "Tsuyoshi Hombashi" __copyright__ = f"Copyright 2016, {__author__}" __license__ = "MIT License" __version__ = "0.62.0" __maintainer__ = __author__ __email__ = "tsuyoshi.hombashi@gmail.com"
Python
0
@@ -120,17 +120,17 @@ _ = %220.6 -2 +3 .0%22%0A__ma
349f975e257192458b1944753d4f609869b9e8d6
use yield_fixture to support pytest on py2.7
python-cim/tests/fixtures.py
python-cim/tests/fixtures.py
import os import pytest import cim import cim.objects @pytest.fixture def repopath(): """ Returns: str: path to the repos/win7/deleted-instance repository """ cd = os.path.dirname(os.path.abspath(__file__)) return os.path.join(cd, 'repos', 'win7', 'deleted-instance') @pytest.fixture def repo(): """ Returns: cim.CIM: repos/win7/deleted-instance repository """ return cim.CIM(cim.CIM_TYPE_WIN7, repopath()) @pytest.fixture def root(): r = repo() with cim.objects.Namespace(r, cim.objects.ROOT_NAMESPACE_NAME) as ns: yield ns
Python
0
@@ -460,32 +460,38 @@ th())%0A%0A%0A@pytest. +yield_ fixture%0Adef root
fe4c66b2e50035ab2701923d6a2cd0cb82e63780
Fix call mkl gemm in mkldnn.py (#7007)
python/tvm/contrib/mkldnn.py
python/tvm/contrib/mkldnn.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """External function interface to BLAS libraries.""" import tvm from tvm import te def matmul(lhs, rhs, transa=False, transb=False, **kwargs): """Create an extern op that compute matrix mult of A and rhs with CrhsLAS This function serves as an example on how to call external libraries. Parameters ---------- lhs: Tensor The left matrix operand rhs: Tensor The right matrix operand transa: bool Whether transpose lhs transb: bool Whether transpose rhs Returns ------- C: Tensor The result tensor. """ n = lhs.shape[1] if transa else lhs.shape[0] m = rhs.shape[0] if transb else rhs.shape[1] return te.extern( (n, m), [lhs, rhs], lambda ins, outs: tvm.tir.call_packed( "tvm.contrib.mkl.matmul", ins[0], ins[1], outs[0], transa, transb ), name="C", **kwargs, )
Python
0
@@ -1602,16 +1602,19 @@ trib.mkl +dnn .matmul%22
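Decoded, the delta only renames the packed function that is looked up at runtime, from the MKL registration to the MKL-DNN one; the extern call site becomes:

lambda ins, outs: tvm.tir.call_packed(
    "tvm.contrib.mkldnn.matmul", ins[0], ins[1], outs[0], transa, transb
),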
a2097bf7c3103ec6e0482bb34f3b33c753ea7889
Enable QtNetwork test for PySide
qtpy/tests/test_qtnetwork.py
qtpy/tests/test_qtnetwork.py
from __future__ import absolute_import

import pytest
from qtpy import PYSIDE, PYSIDE2, QtNetwork


@pytest.mark.skipif(PYSIDE2 or PYSIDE, reason="It fails on PySide/PySide2")
def test_qtnetwork():
    """Test the qtpy.QtNetwork namespace"""
    assert QtNetwork.QAbstractNetworkCache is not None
    assert QtNetwork.QNetworkCacheMetaData is not None
    assert QtNetwork.QHttpMultiPart is not None
    assert QtNetwork.QHttpPart is not None
    assert QtNetwork.QNetworkAccessManager is not None
    assert QtNetwork.QNetworkCookie is not None
    assert QtNetwork.QNetworkCookieJar is not None
    assert QtNetwork.QNetworkDiskCache is not None
    assert QtNetwork.QNetworkReply is not None
    assert QtNetwork.QNetworkRequest is not None
    assert QtNetwork.QNetworkConfigurationManager is not None
    assert QtNetwork.QNetworkConfiguration is not None
    assert QtNetwork.QNetworkSession is not None
    assert QtNetwork.QAuthenticator is not None
    assert QtNetwork.QHostAddress is not None
    assert QtNetwork.QHostInfo is not None
    assert QtNetwork.QNetworkAddressEntry is not None
    assert QtNetwork.QNetworkInterface is not None
    assert QtNetwork.QNetworkProxy is not None
    assert QtNetwork.QNetworkProxyFactory is not None
    assert QtNetwork.QNetworkProxyQuery is not None
    assert QtNetwork.QAbstractSocket is not None
    assert QtNetwork.QLocalServer is not None
    assert QtNetwork.QLocalSocket is not None
    assert QtNetwork.QTcpServer is not None
    assert QtNetwork.QTcpSocket is not None
    assert QtNetwork.QUdpSocket is not None
    assert QtNetwork.QSslCertificate is not None
    assert QtNetwork.QSslCipher is not None
    assert QtNetwork.QSslConfiguration is not None
    assert QtNetwork.QSslError is not None
    assert QtNetwork.QSslKey is not None
    assert QtNetwork.QSslSocket is not None
Python
0
@@ -97,84 +97,8 @@ k%0A%0A%0A -@pytest.mark.skipif(PYSIDE2 or PYSIDE, reason=%22It fails on PySide/PySide2%22)%0A def @@ -261,32 +261,71 @@ ata is not None%0A + if not PYSIDE and not PYSIDE2:%0A assert QtNet @@ -348,32 +348,36 @@ art is not None%0A + assert QtNet @@ -1530,32 +1530,55 @@ ket is not None%0A + if not PYSIDE:%0A assert QtNet @@ -1602,32 +1602,36 @@ ate is not None%0A + assert QtNet @@ -1654,32 +1654,36 @@ is not None%0A + + assert QtNetwork @@ -1709,32 +1709,36 @@ is not None%0A + + assert QtNetwork @@ -1756,32 +1756,36 @@ is not None%0A + + assert QtNetwork @@ -1797,32 +1797,36 @@ Key is not None%0A + assert QtNet
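Decoded, the delta drops the blanket skipif and instead guards the names missing from the PySide bindings. A sketch of the post-commit shape (indentation inferred from the escaped hunks; elided asserts are unchanged):

def test_qtnetwork():
    """Test the qtpy.QtNetwork namespace"""
    assert QtNetwork.QAbstractNetworkCache is not None
    assert QtNetwork.QNetworkCacheMetaData is not None
    if not PYSIDE and not PYSIDE2:
        assert QtNetwork.QHttpMultiPart is not None
        assert QtNetwork.QHttpPart is not None
    assert QtNetwork.QNetworkAccessManager is not None
    # ... unchanged asserts elided ...
    if not PYSIDE:
        assert QtNetwork.QSslCertificate is not None
        # ... the remaining QSsl* asserts move under this guard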
cd323386b61cd280fcf3e599ae6a02b889f81a40
Remove support for old style api urls
raven/contrib/django/urls.py
raven/contrib/django/urls.py
""" raven.contrib.django.urls ~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import try: from django.conf.urls import url except ImportError: # for Django version less than 1.4 from django.conf.urls.defaults import url # NOQA import raven.contrib.django.views urlpatterns = ( url(r'^api/(?:(?P<project_id>[\w_-]+)/)?store/$', raven.contrib.django.views.report, name='raven-report'), url(r'^report/', raven.contrib.django.views.report), )
Python
0
@@ -443,11 +443,8 @@ i/(? -:(? P%3Cpr @@ -465,10 +465,8 @@ %5D+)/ -)? stor
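Decoded, the delta deletes the ':(?' and ')?' fragments, turning the optional non-capturing group into a required capture; post-commit the store endpoint matches only when a project id is present:

url(r'^api/(?P<project_id>[\w_-]+)/store/$',
    raven.contrib.django.views.report,
    name='raven-report'),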
d3e9e7f6873e54f9657d1fdc9c3339b49c4936ae
Update tests to use required request argument
rcamp/tests/test_lib_auth.py
rcamp/tests/test_lib_auth.py
from django.conf import settings
import mock
import pam

from lib.pam_backend import PamBackend
from tests.utilities.ldap import (
    LdapTestCase,
    get_ldap_user_defaults
)
from accounts.models import (
    RcLdapUser,
    User
)


# This test case covers functionality in the custom PAM Auth Backend
class PamBackendTestCase(LdapTestCase):
    def setUp(self):
        self.pb = PamBackend()
        super(PamBackendTestCase,self).setUp()

    @mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
    def test_authenticate(self):
        rc_user_defaults = get_ldap_user_defaults()
        RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
        rc_user = RcLdapUser.objects.get(username='testuser')

        self.assertRaises(User.DoesNotExist, User.objects.get, username='testuser')

        user = self.pb.authenticate(username='testuser',password='passwd')
        self.assertIsNotNone(user)
        self.assertEqual(user.username,rc_user.username)
        self.assertEqual(user.first_name,rc_user.first_name)
        self.assertEqual(user.last_name,rc_user.last_name)
        self.assertEqual(user.email,rc_user.email)

        reauthed_user = self.pb.authenticate(username='testuser',password='passwd')
        self.assertEqual(reauthed_user,user)
        self.assertFalse(reauthed_user.is_staff)

    @mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=False))
    def test_authenticate_failed(self):
        rc_user_defaults = get_ldap_user_defaults()
        RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)

        self.assertRaises(User.DoesNotExist, User.objects.get, username='testuser')

        user = self.pb.authenticate(username='testuser',password='badpasswd')
        self.assertIsNone(user)
        self.assertRaises(User.DoesNotExist, User.objects.get, username='testuser')

    @mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
    def test_authenticate_update_user(self):
        rc_user_defaults = get_ldap_user_defaults()
        RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
        rc_user = RcLdapUser.objects.get(username='testuser')

        self.assertRaises(User.DoesNotExist, User.objects.get, username='testuser')

        user = self.pb.authenticate(username='testuser',password='passwd')
        self.assertIsNotNone(user)

        rc_user.first_name = 'pamtested'
        rc_user.save(organization='ucb',)

        user = self.pb.authenticate(username='testuser',password='passwd')
        self.assertEqual(user.first_name,'pamtested')
        self.assertFalse(user.is_staff)

    @mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
    def test_get_user(self):
        rc_user_defaults = get_ldap_user_defaults()
        RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)

        self.assertRaises(User.DoesNotExist, User.objects.get, username='testuser')

        user = self.pb.authenticate(username='testuser',password='passwd')
        self.assertIsNotNone(user)

        user = self.pb.get_user(user.id)
        self.assertEqual(user.username, 'testuser')
Python
0
@@ -868,32 +868,38 @@ pb.authenticate( +None, username='testus @@ -1222,32 +1222,38 @@ pb.authenticate( +None, username='testus @@ -1746,32 +1746,38 @@ pb.authenticate( +None, username='testus @@ -2385,32 +2385,38 @@ pb.authenticate( +None, username='testus @@ -2586,32 +2586,38 @@ pb.authenticate( +None, username='testus @@ -3106,16 +3106,22 @@ nticate( +None, username @@ -3268,20 +3268,65 @@ ername, 'testuser')%0A +%0A%0A# class LDAPBackendTestCase(LdapTestCase):%0A
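Decoded, every pb.authenticate(...) call gains None as a new first positional argument (the request argument that newer Django auth backends require), and a commented-out LDAPBackendTestCase header is appended at the end of the file; for example:

user = self.pb.authenticate(None, username='testuser', password='passwd')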
15865668659ebee4cb49a28d09f9ff1d67d2a96d
Set issue to in progress when sending issue
rdmo/projects/views/issue.py
rdmo/projects/views/issue.py
import logging

from django.conf import settings
from django.core.mail import EmailMessage
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, UpdateView

from rdmo.core.views import ObjectPermissionMixin, RedirectViewMixin

from ..forms import IssueMailForm
from ..models import Issue

logger = logging.getLogger(__name__)


class IssueUpdateView(ObjectPermissionMixin, RedirectViewMixin, UpdateView):
    model = Issue
    queryset = Issue.objects.all()
    fields = ('status', )
    permission_required = 'projects.change_issue_object'

    def get_permission_object(self):
        return self.get_object().project


class IssueSendView(ObjectPermissionMixin, RedirectViewMixin, DetailView):
    queryset = Issue.objects.all()
    permission_required = 'projects.change_issue_object'
    template_name = 'projects/issue_send.html'

    def get_permission_object(self):
        return self.get_object().project

    def get_context_data(self, **kwargs):
        if 'form' not in kwargs:
            kwargs['form'] = IssueMailForm(initial={
                'subject': self.object.task.title,
                'message': self.object.task.text,
                'cc_myself': True
            })
        context = super().get_context_data(**kwargs)
        context['integrations'] = self.get_object().project.integrations.all()
        return context

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()

        integration_id = request.POST.get('integration')
        if integration_id:
            # send via integration
            integration = self.get_object().project.integrations.get(pk=integration_id)
            return integration.provider.send_issue(request, integration.options_dict, self.object)
        else:
            # send via mail
            form = IssueMailForm(request.POST)

            if form.is_valid():
                from_email = settings.DEFAULT_FROM_EMAIL
                to_emails = form.cleaned_data.get('recipients') + form.cleaned_data.get('recipients_input', [])
                cc_emails = [request.user.email] if form.cleaned_data.get('cc_myself') else []
                reply_to = [request.user.email]
                subject = form.cleaned_data.get('subject')
                message = form.cleaned_data.get('message')

                EmailMessage(subject, message, from_email, to_emails,
                             cc=cc_emails, reply_to=reply_to).send()

                return HttpResponseRedirect(self.get_object().project.get_absolute_url())
            else:
                return self.render_to_response(self.get_context_data(form=form))
Python
0
@@ -1465,16 +1465,104 @@ bject()%0A + self.object.status = Issue.ISSUE_STATUS_IN_PROGRESS%0A self.object.save()%0A%0A
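Decoded, the delta inserts two statements right after the object lookup in post(), so sending an issue also marks it in progress:

self.object = self.get_object()
self.object.status = Issue.ISSUE_STATUS_IN_PROGRESS
self.object.save()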
d62d12d2d274c7e065c48155c4dda6d9d135e4e8
Use java method to get max count of steps
Server/integrations/cas2_duo/Cas2DuoExternalAuthenticator.py
Server/integrations/cas2_duo/Cas2DuoExternalAuthenticator.py
# oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2018, Gluu
#
# Author: Yuriy Movchan
#

from org.xdi.service.cdi.util import CdiUtil
from org.xdi.oxauth.security import Identity
from org.xdi.model.custom.script.type.auth import PersonAuthenticationType
from org.xdi.oxauth.service import UserService, AuthenticationService
from org.xdi.util import ArrayHelper, StringHelper
from java.util import ArrayList, Arrays

from Cas2ExternalAuthenticator import PersonAuthentication as Cas2ExternalAuthenticator
from DuoExternalAuthenticator import PersonAuthentication as DuoExternalAuthenticator

class PersonAuthentication(PersonAuthenticationType):
    def __init__(self, currentTimeMillis):
        self.currentTimeMillis = currentTimeMillis
        self.cas2ExternalAuthenticator = Cas2ExternalAuthenticator(currentTimeMillis)
        self.duoExternalAuthenticator = DuoExternalAuthenticator(currentTimeMillis)

    def init(self, configurationAttributes):
        print "CAS2 + Duo. Initialization"

        cas2_result = self.cas2ExternalAuthenticator.init(configurationAttributes)
        duo_result = self.duoExternalAuthenticator.init(configurationAttributes)

        print "CAS2 + Duo. Initialized successfully"
        return cas2_result and duo_result

    def destroy(self, configurationAttributes):
        print "CAS2 + Duo. Destroy"

        cas2_result = self.cas2ExternalAuthenticator.destroy(configurationAttributes)
        duo_result = self.duoExternalAuthenticator.destroy(configurationAttributes)

        print "CAS2 + Duo. Destroyed successfully"
        return cas2_result and duo_result

    def getApiVersion(self):
        return 1

    def isValidAuthenticationMethod(self, usageType, configurationAttributes):
        cas2_result = self.cas2ExternalAuthenticator.isValidAuthenticationMethod(usageType, configurationAttributes)
        duo_result = self.duoExternalAuthenticator.isValidAuthenticationMethod(usageType, configurationAttributes)

        return cas2_result and duo_result

    def getAlternativeAuthenticationMethod(self, usageType, configurationAttributes):
        cas2_result = self.cas2ExternalAuthenticator.getAlternativeAuthenticationMethod(usageType, configurationAttributes)
        if cas2_result != None:
            return cas2_result

        duo_result = self.duoExternalAuthenticator.getAlternativeAuthenticationMethod(usageType, configurationAttributes)
        if duo_result != None:
            return duo_result

        return None

    def authenticate(self, configurationAttributes, requestParameters, step):
        result = False
        start_duo = False
        if step == 1:
            # Execute CAS2 for step #1
            result = self.cas2ExternalAuthenticator.authenticate(configurationAttributes, requestParameters, step)

            # Execute DUO prepareForStep and authenticate for step #1 if needed
            cas2_count_steps = self.cas2ExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if cas2_count_steps == 1:
                result = result and self.duoExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, step)
                result = result and self.duoExternalAuthenticator.authenticate(configurationAttributes, requestParameters, step)
        elif step == 2:
            # Execute CAS2 for step #2 if needed
            cas2_count_steps = self.cas2ExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if cas2_count_steps == 2:
                result = self.cas2ExternalAuthenticator.authenticate(configurationAttributes, requestParameters, step)

                # Execute DUO prepareForStep and authenticate for step #1
                result = result and self.duoExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, 1)
                result = result and self.duoExternalAuthenticator.authenticate(configurationAttributes, requestParameters, 1)
            else:
                duo_count_steps = self.duoExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
                if duo_count_steps == 2:
                    result = self.duoExternalAuthenticator.authenticate(configurationAttributes, requestParameters, step)
        elif step == 3:
            # Execute DUO for step #2 if needed
            duo_count_steps = self.duoExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if duo_count_steps == 2:
                result = self.duoExternalAuthenticator.authenticate(configurationAttributes, requestParameters, 2)

        return result

    def prepareForStep(self, configurationAttributes, requestParameters, step):
        result = False
        # Execute CAS2 for step #1
        if step == 1:
            # Execute CAS2 for step #1
            result = self.cas2ExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, step)
        elif step == 2:
            # Execute CAS2 for step #2 if needed
            cas2_count_steps = self.cas2ExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if cas2_count_steps == 2:
                result = self.cas2ExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, step)
            else:
                # Execute DUO for step #2 if needed
                duo_count_steps = self.duoExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
                if duo_count_steps == 2:
                    result = self.duoExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, step)
        elif step == 3:
            # Execute DUO for step #2 if needed
            duo_count_steps = self.duoExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if duo_count_steps == 2:
                result = self.duoExternalAuthenticator.prepareForStep(configurationAttributes, requestParameters, 2)

        return result

    def getExtraParametersForStep(self, configurationAttributes, step):
        cas2_result = self.cas2ExternalAuthenticator.getExtraParametersForStep(configurationAttributes, step)
        duo_result = self.duoExternalAuthenticator.getExtraParametersForStep(configurationAttributes, step)

        if cas2_result == None:
            return duo_result

        if duo_result == None:
            return cas2_result

        result_list = ArrayList()
        result_list.addAll(cas2_result)
        result_list.addAll(duo_result)

        return result_list

    def getCountAuthenticationSteps(self, configurationAttributes):
        cas2_count_steps = self.cas2ExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
        duo_count_steps = self.duoExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)

        print "CAS2 + Duo. Get count authentication steps. cas2_count_steps = %s, duo_count_steps = %s" % (cas2_count_steps, duo_count_steps)

        if (cas2_count_steps == 1) and (duo_count_steps == 1):
            return 1

        if (cas2_count_steps == 2) and (duo_count_steps == 2):
            return 3

        return max(cas2_count_steps, duo_count_steps)

    def getPageForStep(self, configurationAttributes, step):
        result = ""
        if step == 1:
            result = self.cas2ExternalAuthenticator.getPageForStep(configurationAttributes, step)
        elif step == 2:
            cas2_count_steps = self.cas2ExternalAuthenticator.getCountAuthenticationSteps(configurationAttributes)
            if cas2_count_steps == 2:
                result = self.cas2ExternalAuthenticator.getPageForStep(configurationAttributes, step)
            else:
                result = self.duoExternalAuthenticator.getPageForStep(configurationAttributes, step)
        elif step == 3:
            result = self.duoExternalAuthenticator.getPageForStep(configurationAttributes, step)

        return result

    def logout(self, configurationAttributes, requestParameters):
        cas2_result = self.cas2ExternalAuthenticator.logout(configurationAttributes, requestParameters)
        duo_result = self.duoExternalAuthenticator.logout(configurationAttributes, requestParameters)

        return cas2_result and duo_result
Python
0.000001
@@ -7268,16 +7268,22 @@ return + Math. max(cas
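Decoded, the delta prefixes the call with Math., delegating to java.lang.Math.max under the Jython runtime these oxAuth scripts execute in. The hunk itself adds no import, so Math is assumed to resolve through the script's Java imports:

return Math.max(cas2_count_steps, duo_count_steps)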
80c0f29d7b81a68d14e261ce1c062d37a0e6d4f7
Increase ARM process startup time baseline
tests/integration_tests/performance/test_process_startup_time.py
tests/integration_tests/performance/test_process_startup_time.py
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test that the process startup time up to socket bind is within spec."""

import json
import os
import platform
import time

import host_tools.logging as log_tools

MAX_STARTUP_TIME_CPU_US = {'x86_64': 5500, 'aarch64': 2600}
""" The maximum acceptable startup time in CPU us. """
# TODO: Keep a `current` startup time in S3 and validate we don't regress


def test_startup_time(test_microvm_with_api):
    """Check the startup time for jailer and Firecracker up to socket bind."""
    microvm = test_microvm_with_api
    microvm.spawn()

    microvm.basic_config(vcpu_count=2, mem_size_mib=1024)

    # Configure metrics.
    metrics_fifo_path = os.path.join(microvm.path, 'metrics_fifo')
    metrics_fifo = log_tools.Fifo(metrics_fifo_path)

    response = microvm.metrics.put(
        metrics_path=microvm.create_jailed_resource(metrics_fifo.path)
    )
    assert microvm.api_session.is_status_no_content(response.status_code)

    microvm.start()
    time.sleep(0.4)

    # The metrics fifo should be at index 1.
    # Since metrics are flushed at InstanceStart, the first line will suffice.
    lines = metrics_fifo.sequential_reader(1)
    metrics = json.loads(lines[0])
    startup_time_us = metrics['api_server']['process_startup_time_us']
    cpu_startup_time_us = metrics['api_server']['process_startup_time_cpu_us']

    print('Process startup time is: {} us ({} CPU us)'
          .format(startup_time_us, cpu_startup_time_us))

    assert cpu_startup_time_us > 0
    assert cpu_startup_time_us <= MAX_STARTUP_TIME_CPU_US[platform.machine()]
Python
0
@@ -327,17 +327,17 @@ ch64': 2 -6 +8 00%7D%0A%22%22%22
0a80fa2e610637a6c657f99c8eac5d99e33b5844
Use /usr/bin/env in #!
lines2jsonarray.py
lines2jsonarray.py
#!/usr/bin/python

from __future__ import print_function

import sys

print('[')
for i, line in enumerate(sys.stdin):
    if i != 0:
        print(',')
    print(line)
print(']')
Python
0
@@ -4,16 +4,20 @@ usr/bin/ +env python%0Af
2918aeaf34774528def5b798dadff2767e63c111
Allow case sensitivity change in CLI
linguistica/cli.py
linguistica/cli.py
# -*- encoding: utf8 -*-

import sys
import os
from pprint import pformat

import linguistica as lxa
from linguistica.util import ENCODING

lxa_version = lxa.__version__


def main():
    print('\nWelcome to Linguistica {}!'.format(lxa_version))

    # --------------------------------------------------------------------------
    # determine if file is a wordlist or a corpus text

    use_wordlist_response = None
    while use_wordlist_response is None:
        use_wordlist_response = input('\nAre you using a wordlist file? [N/y] ')

    if use_wordlist_response and use_wordlist_response[0].lower() == 'y':
        use_wordlist = True
    else:
        use_wordlist = False

    # --------------------------------------------------------------------------
    # get file path

    file_abspath = None
    while file_abspath is None:
        file_path = input('\nPath to your file: ')

        if sys.platform.startswith('win'):
            file_path = file_path.replace('/', os.sep)
        else:
            file_path = file_path.replace('\\', os.sep)

        file_abspath = os.path.abspath(file_path)
        if not os.path.isfile(file_abspath):
            print('Invalid file path!')
            file_abspath = None

    print('\nFull file path:\n{}'.format(file_abspath))

    # --------------------------------------------------------------------------
    # determine output directory

    output_dir = os.path.join(os.path.dirname(file_abspath), 'lxa_outputs')
    print('\nDefault output directory:\n{}'.format(output_dir))

    change_dir_response = None
    while change_dir_response is None:
        change_dir_response = input('Change it? [N/y] ')

    if change_dir_response and change_dir_response[0].lower() == 'y':
        new_output_dir = None
        while new_output_dir is None:
            new_output_dir = input('Specify output directory: ')

            if sys.platform.startswith('win'):
                new_output_dir = new_output_dir.replace('/', os.sep)
            else:
                new_output_dir = new_output_dir.replace('\\', os.sep)

            new_output_dir = os.path.abspath(new_output_dir)

            if not os.path.isdir(new_output_dir):
                try:
                    os.mkdir(new_output_dir)
                except FileNotFoundError:
                    print('Cannot make a new directory in a non-existing one!')
                    new_output_dir = None

        output_dir = new_output_dir

    if not os.path.isdir(output_dir):
        os.mkdir(output_dir)

    # --------------------------------------------------------------------------
    # change encoding, if instructed

    encoding = ENCODING
    print('\nDefault encoding for input and output files:', encoding)

    change_encoding_ans = None
    while change_encoding_ans is None:
        change_encoding_ans = input('Change encoding? [N/y] ')

    if change_encoding_ans and change_encoding_ans[0].lower() == 'y':
        new_encoding = None
        while new_encoding is None:
            new_encoding = input('New encoding: ')
            if not new_encoding:
                new_encoding = None
        encoding = new_encoding

    # --------------------------------------------------------------------------
    # create the Linguistica object

    if use_wordlist:
        lxa_object = lxa.read_wordlist(file_abspath, encoding=encoding)
    else:
        lxa_object = lxa.read_corpus(file_abspath, encoding=encoding)

    # --------------------------------------------------------------------------
    # change parameters, if instructed

    print('\nParameters:\n{}'.format(pformat(lxa_object.parameters())))

    change_parameters_ans = None
    while change_parameters_ans is None:
        change_parameters_ans = input('\nChange any parameters? [N/y] ')

    new_parameter_value_pairs = list()

    if change_parameters_ans and change_parameters_ans[0].lower() == 'y':
        print('\nEnter parameter-value pairs\n'
              '(e.g. "min_stem_length=3 max_affix_length=3" without quotes):')

        parameter_value_str = None
        while not parameter_value_str:
            parameter_value_str = input()

            for parameter_value in parameter_value_str.split():
                try:
                    parameter, value = parameter_value.split('=')
                except ValueError:
                    print('Invalid parameter-value pair: ' + parameter_value)
                    parameter_value_str = None
                    break

                if parameter not in lxa_object.parameters():
                    print('Unknown parameter: ', parameter)
                    parameter_value_str = None
                    break

                try:
                    value_int = int(value)
                except ValueError:
                    print('Cannot parse {} as an integer for parameter {}'
                          .format(value, parameter))
                    parameter_value_str = None
                    break

                new_parameter_value_pairs.append((parameter, value_int))

    if new_parameter_value_pairs:
        lxa_object.change_parameters(**dict(new_parameter_value_pairs))

        print('\nParameters after the changes:\n{}'
              .format(pformat(lxa_object.parameters())))

    # --------------------------------------------------------------------------
    # run all Linguistica modules on the given file

    print('\nRunning all Linguistica modules on the given file:')
    lxa_object.run_all_modules(verbose=True)

    # --------------------------------------------------------------------------
    # output results as files

    print('\nGenerating output files...\n')
    lxa_object.output_all_results(directory=output_dir, verbose=True)
    print('\nResults are in ' + output_dir)
Python
0.000005
@@ -3157,16 +3157,509 @@ coding%0A%0A + # --------------------------------------------------------------------------%0A # change case-sensitivity, if instructed%0A%0A keep_case = False%0A print('%5CnDefault behavior for case sensitivity: False%5Cn'%0A '(e.g. %22the%22 and %22The%22 are collapsed)')%0A%0A change_case_ans = None%0A while change_case_ans is None:%0A change_case_ans = input('Switch to %22True%22 for case sensitivity? %5BN/y%5D ')%0A%0A if change_case_ans and change_case_ans%5B0%5D.lower() == 'y':%0A keep_case = True%0A%0A # -- @@ -3859,16 +3859,76 @@ encoding +,%0A keep_case=keep_case )%0A el @@ -3995,24 +3995,82 @@ ing=encoding +,%0A keep_case=keep_case )%0A%0A # ---
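Decoded, the delta inserts a prompt block defining a keep_case flag (default False, i.e. "the" and "The" are collapsed) and threads it into both constructors; a condensed sketch of the added code:

keep_case = False
change_case_ans = input('Switch to "True" for case sensitivity? [N/y] ')
if change_case_ans and change_case_ans[0].lower() == 'y':
    keep_case = True

if use_wordlist:
    lxa_object = lxa.read_wordlist(file_abspath, encoding=encoding,
                                   keep_case=keep_case)
else:
    lxa_object = lxa.read_corpus(file_abspath, encoding=encoding,
                                 keep_case=keep_case)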
6cae1c77094f91443781f33b04abe96541739599
hello2
robo/test/visual_test.py
robo/test/visual_test.py
import GPy
import matplotlib; matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
import numpy as np
import os

from robo.models.GPyModel import GPyModel
from robo.acquisition.EI import EI
from robo.maximizers.maximize import stochastic_local_search
from robo.recommendation.incumbent import compute_incumbent
from robo.visualization import plotting as plotting

# The optimization function that we want to optimize. It gets a numpy array with shape (N,D) where N >= 1 are the number of datapoints and D are the number of features
def objective_function(x):
    return np.sin(3 * x) * 4 * (x - 1) * (x + 2)

def run():
    # Defining the bounds and dimensions of the input space
    X_lower = np.array([0])
    X_upper = np.array([6])
    dims = 1

    # Set the method that we will use to optimize the acquisition function
    maximizer = stochastic_local_search

    # Defining the method to model the objective function
    kernel = GPy.kern.Matern52(input_dim=dims)
    model = GPyModel(kernel, optimize=True, noise_variance=1e-4, num_restarts=10)

    # The acquisition function that we optimize in order to pick a new x
    acquisition_func = EI(model, X_upper=X_upper, X_lower=X_lower, compute_incumbent=compute_incumbent, par=0.1)  # par is the minimum improvement that a point has to obtain

    # Draw one random point and evaluate it to initialize BO
    X = np.array([np.random.uniform(X_lower, X_upper, dims)])
    Y = objective_function(X)

    # Fit the model on the data we observed so far
    model.train(X, Y)

    # Update the acquisition function model with the retrained model
    acquisition_func.update(model)

    # Optimize the acquisition function to obtain a new point
    new_x = maximizer(acquisition_func, X_lower, X_upper)

    # Evaluate the point and add the new observation to our set of previous seen points
    new_y = objective_function(np.array(new_x))
    X = np.append(X, new_x, axis=0)
    Y = np.append(Y, new_y, axis=0)

    # Visualize the objective function, model and the acquisition function
    fig = plt.figure()
    #Sub plot for the model and the objective function
    ax1 = fig.add_subplot(2,1,1)
    #Sub plot for the acquisition function
    ax2 = fig.add_subplot(2,1,2)

    resolution = 0.1

    # Call plot_model function
    ax1=plotting.plot_model(model,X_lower,X_upper,ax1,resolution,'b','blue',"Prosterior Mean",3,True)
    #Call plot_objective_function
    ax1=plotting.plot_objective_function(objective_function,X_lower,X_upper,X,Y,ax1,resolution,'black','ObjectiveFunction',True)
    ax1.set_title("Model + Objective Function")
    #Call plot_acquisition_function
    ax2=plotting.plot_acquisition_function(acquisition_func,X_lower,X_upper,X,ax2,resolution,"AcquisitionFunction",True)

    plt.savefig('test2.png')
    os.system('eog test2.png&')
Python
0.999986
@@ -2776,16 +2776,17 @@ 2.png')%0A +%0A os.s
759f2510617813b3257e56fd422448a9670df3a3
Add plugin startup commands.
rplugin/python3/cmake.py
rplugin/python3/cmake.py
import neovim
from pathlib import Path
import subprocess

cmake_build_info = {
    "old_cmake_files": [
        Path("CMakeCache.txt"),
        Path("cmake_install.cmake"),
        Path("Makefile"),
        Path("compile_commands.json")
    ],
    "old_cmake_dir": Path("CMakeFiles"),
    "cmake_proj": Path("CMakeLists.txt"),
    "build_dir": Path("build"),
    "comp_data_cmake": Path("build/compile_commands.json")
}

cmake_cmd_info = {
    "cmake_cmd": ["cmake", "-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", ".."],
    "rdm_cmd": ["rdm", "--silent", "--daemon"],
    "rtags_shutdwn": ["rc", "--quit-rdm"],
    "rc_cmd": ["rc", "-J", str(cmake_build_info["build_dir"])]
}


def removeDirtyDir():
    if cmake_build_info["build_dir"].is_dir():
        print("Cleaning up Build Directory")
        subprocess.call(["rm", "-rf", str(cmake_build_info["build_dir"])],
                        stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)


def removeOldCMakeFiles():
    if cmake_build_info["old_cmake_dir"].is_dir():
        print("Cleaning up Old CMake Directory")
        subprocess.call(["rm", "-rf", str(cmake_build_info["old_cmake_dir"])],
                        stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    for path in cmake_build_info["old_cmake_files"]:
        if path.is_file():
            print("Cleaning up Old CMake Files")
            subprocess.call(["rm", str(path)],
                            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)


def run_cmake():
    print("Running CMake")
    try:
        subprocess.check_call(["mkdir", "build"],
                              stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        print(e.output)
        print("Can\'t setup CMake build directory.")
        raise
    subprocess.check_call(cmake_cmd_info["cmake_cmd"], cwd="build",
                          stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    if not cmake_build_info["comp_data_cmake"].is_file():
        print("Couldn't setup CMake Project")
    #try:
    #    subprocess.check_call(cmake_cmd_info["cmake_cmd"], cwd="build")
    #except subprocess.CalledProcessError as e:
    #    print(e.output)
    #    print("CMake Failed.")
    #    raise
    #else:
    #    print("Error Generating Compilation Database With CMake")
    #    raise


def setup_rtags_daemon():
    print("Initializing RTags Daemon")
    try:
        subprocess.check_call(cmake_cmd_info["rtags_shutdwn"],
                              stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError as e:
        print(e.output)
        print("Info: RTags Daemon Not Running")
    subprocess.check_call(cmake_cmd_info["rdm_cmd"], cwd="..",
                          stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    #try:
    #    subprocess.check_call(cmake_cmd_info["rdm_cmd"], cwd="..")
    #except subprocess.CalledProcessError as e:
    #    print(e.output)
    #    print("Couldn\'t start the RTags daemon.")
    #    raise


def connect_rtags_client():
    print("Connecting RTags Client")
    if cmake_build_info["comp_data_cmake"].is_file():
        subprocess.check_call(cmake_cmd_info["rc_cmd"],
                              stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        #try:
        #    subprocess.check_call(cmake_cmd_info["rc_cmd"])
        #except subprocess.CalledProcessError as e:
        #    print(e.output)
        #    print("Couldn\'t connect the RTags client.")
        #    raise
    else:
        print("Error Generating Compilation Database With CMake")


@neovim.plugin
class CMakeRTagsProject(object):

    def __init__(self, vim):
        self.vim = vim

    @neovim.command('CMakeProjectSetup', sync=True)
    def run_cmake_setup_rtags(self):
        removeOldCMakeFiles()
        if cmake_build_info["build_dir"].is_dir():
            removeDirtyDir()

        if cmake_build_info["cmake_proj"].is_file():
            self.vim.command('echo "Starting CMake Project"')
            run_cmake()
            setup_rtags_daemon()
            connect_rtags_client()
            self.vim.command('ChromaticaStart')
        else:
            self.vim.command('echo "Not a CMake Project"')
Python
0
@@ -678,16 +678,117 @@ %22%5D)%5D%0A%7D%0A%0A +plugin_cmd_info = %7B%0A %22chromatica%22: %22ChromaticaStart%22,%0A %22deoplete%22: %22call deoplete#enable()%22%0A%7D%0A%0A %0Adef rem @@ -4276,42 +4276,72 @@ -self.vim.command('ChromaticaStart' +for cmd in plugin_cmd_info:%0A self.vim.command(cmd )%0A
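Decoded, the delta introduces a plugin_cmd_info mapping and replaces the single ChromaticaStart call with a loop. Note that iterating a dict yields its keys, so as committed the loop issues 'chromatica' and 'deoplete' as Ex commands; looping over plugin_cmd_info.values() would send the mapped commands instead:

plugin_cmd_info = {
    "chromatica": "ChromaticaStart",
    "deoplete": "call deoplete#enable()"
}

for cmd in plugin_cmd_info:
    self.vim.command(cmd)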
14884a18b42dab22e5893c619164049a475888cc
fix traceback on bank accounts creation
addons/account_bank_statement_import/wizard/setup_wizards.py
addons/account_bank_statement_import/wizard/setup_wizards.py
# -*- coding: utf-8 -*-

from odoo import models, fields, api


class SetupBarBankConfigWizard(models.TransientModel):
    _inherit = 'account.setup.bank.manual.config'

    def validate(self):
        """ Default the bank statement source of new bank journals as 'file_import' """
        super(SetupBarBankConfigWizard, self).validate()
        if self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined' \
                and self.env['account.journal']._get_bank_statements_available_import_formats():
            self.linked_journal_id.bank_statements_source = 'file_import'
Python
0.000001
@@ -351,16 +351,17 @@ if +( self.cre @@ -451,16 +451,17 @@ defined' +) %5C%0A
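Decoded, the delta only adds parentheses. Because and binds tighter than or, the old condition parsed as A or (B and C), so any newly created journal satisfied it even when no import format module was installed, which is the traceback this commit fixes; post-commit:

if (self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined') \
        and self.env['account.journal']._get_bank_statements_available_import_formats():
    self.linked_journal_id.bank_statements_source = 'file_import'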
b12868d32bb295e5518265fb184e549c3f5c79c2
Use static.mybinder.org for badge image (#334)
sphinx_gallery/binder.py
sphinx_gallery/binder.py
# -*- coding: utf-8 -*-
# Author: Chris Holdgraf
# License: 3-clause BSD
"""
Binder utility functions
========================

Integration with Binder is on an experimental stage. Note that this API may
change in the future.

.. warning::

   Binder is still beta technology, so there may be instability in the
   experience of users who click Binder links.
"""

import shutil as sh
import os

try:
    basestring
except NameError:
    basestring = str
    unicode = str

from .utils import replace_py_ipynb


def gen_binder_url(fname, binder_conf):
    """Generate a Binder URL according to the configuration in conf.py.

    Parameters
    ----------
    fname: str
        The path to the `.py` file for which a Binder badge will be generated.
    binder_conf: dict | None
        The Binder configuration dictionary. See `gen_binder_rst` for details.

    Returns
    -------
    binder_url : str
        A URL that can be used to direct the user to the live Binder
        environment.
    """
    # Build the URL
    binder_fpath = '_downloads/{}'.format(replace_py_ipynb(fname))
    binder_url = binder_conf['url']
    binder_url = '/'.join([binder_conf['url'],
                           'v2', 'gh',
                           binder_conf['org'],
                           binder_conf['repo'],
                           binder_conf['branch']])
    binder_url += '?filepath={}'.format(binder_fpath)
    return binder_url


def gen_binder_rst(fname, binder_conf):
    """Generate the RST + link for the Binder badge.

    Parameters
    ----------
    fname: str
        The path to the `.py` file for which a Binder badge will be generated.
    binder_conf: dict | None
        If a dictionary it must have the following keys:

        'url': The URL of the BinderHub instance that's running a Binder
            service.
        'org': The GitHub organization to which the documentation will be
            pushed.
        'repo': The GitHub repository to which the documentation will be
            pushed.
        'branch': The Git branch on which the documentation exists (e.g.,
            gh-pages).
        'dependencies': A list of paths to dependency files that match the
            Binderspec.

    Returns
    -------
    rst : str
        The reStructuredText for the Binder badge that links to this file.
    """
    binder_url = gen_binder_url(fname, binder_conf)
    rst = (
        "\n"
        "  .. container:: binder-badge\n\n"
        "    .. image:: https://mybinder.org/badge.svg\n"
        "      :target: {}\n"
        "      :width: 150 px\n").format(binder_url)
    return rst


def copy_binder_reqs(app):
    """Copy Binder requirements files to a "binder" folder in the docs."""
    binder_conf = app.config.sphinx_gallery_conf['binder']
    path_reqs = binder_conf.get('dependencies')

    binder_folder = os.path.join(app.builder.outdir, 'binder')
    if not os.path.isdir(binder_folder):
        os.makedirs(binder_folder)
    for path in path_reqs:
        sh.copy(os.path.join(app.builder.srcdir, path),
                binder_folder)


def check_binder_conf(binder_conf):
    """Check to make sure that the Binder configuration is correct."""
    # Grab the configuration and return None if it's not configured
    binder_conf = {} if binder_conf is None else binder_conf
    if not isinstance(binder_conf, dict):
        raise ValueError('`binder_conf` must be a dictionary or None.')
    if len(binder_conf) == 0:
        return binder_conf

    # Ensure all fields are populated
    req_values = ['url', 'org', 'repo', 'branch', 'dependencies']
    missing_values = []
    for val in req_values:
        if binder_conf.get(val) is None:
            missing_values.append(val)

    if len(missing_values) > 0:
        raise ValueError('binder_conf is missing values for: {}'.format(
            missing_values))

    # Ensure we have http in the URL
    if not any(binder_conf['url'].startswith(ii)
               for ii in ['http://', 'https://']):
        raise ValueError('did not supply a valid url, '
                         'gave url: {}'.format(binder_conf['url']))

    # Ensure we have at least one dependency file
    # Need at least one of these two files
    required_reqs_files = ['requirements.txt', 'environment.yml']
    path_reqs = binder_conf['dependencies']
    if isinstance(path_reqs, basestring):
        path_reqs = [path_reqs]
        binder_conf['dependencies'] = path_reqs
    elif not isinstance(path_reqs, (list, tuple)):
        raise ValueError("`dependencies` value should be a list of strings. "
                         "Got type {}.".format(type(path_reqs)))

    path_reqs_filenames = [os.path.basename(ii) for ii in path_reqs]
    if not any(ii in path_reqs_filenames for ii in required_reqs_files):
        raise ValueError(
            'Did not find one of `requirements.txt` or `environment.yml` '
            'in the "dependencies" section of the binder configuration '
            'for sphinx-gallery. A path to at least one of these files '
            'must exist in your Binder dependencies.')

    return binder_conf
Python
0
@@ -2383,16 +2383,17 @@ r_conf)%0A +%0A rst @@ -2485,16 +2485,23 @@ https:// +static. mybinder
0b707c137aef4c6ad6ddd27b00585388b152666c
fix skitaid shutdown
skitai/skitaid.py
skitai/skitaid.py
#!/usr/bin/python3
# 2014. 12. 9 by Hans Roh hansroh@gmail.com

__version__ = "0.8.8.1"
version_info = tuple (map (lambda x: not x.isdigit () and x or int (x), __version__.split (".")))

import sys
import subprocess
import os
import signal
import time
from aquests.lib import confparse, logger, flock, pathtool
from skitai.server.wastuff import process, daemon
import time

class Service (daemon.Daemon):
	BACKOFF_MAX_INTERVAL = 600
	CLEAN_SHUTDOWNED = {}
	RESTART_QUEUE = {}
	DAEMONS = ("smtpda", "cron")

	def __init__ (self, cmd, logpath, varpath, verbose):
		self.cmd = cmd
		self.logpath = logpath
		self.varpath = varpath
		self.consol = verbose
		self.make_logger (False)

		self.backoff_start_time = None
		self.backoff_interval = 5
		self.child = None

	def set_backoff (self, reset = False):
		if reset:
			if self.backoff_start_time is None:
				return
			else:
				self.backoff_start_time = None
				self.backoff_interval = 5
				return
		if self.backoff_start_time is None:
			self.backoff_start_time = time.time ()

	def shutdown (self):
		self.logger ("[info] try to kill %s..." % self.child.name)
		self.child.kill ()
		for i in range (30):
			time.sleep (1)
			if self.child.poll () is None:
				self.logger ("[info] %s is still alive" % self.child.name)
			else:
				break
		if self.child.poll () is None:
			self.logger ("[info] force to kill %s" % self.child.name)
			self.child.send_signal ('kill')

	def run (self):
		if os.name == "nt":
			signal.signal(signal.SIGBREAK, self.shutdown)
		else:
			signal.signal(signal.SIGTERM, self.shutdown)

		try:
			try:
				self.start ()
			except:
				self.logger.trace ()
		finally:
			self.shutdown ()

	def create (self):
		self.child = process.Process (
			self.cmd,
			'instance',
			not self.varpath and (os.name == "posix" and '/var/skitai' or r'c:\var\skitai')
		)

	def start (self):
		self.create ()
		try:
			while 1:
				exitcode = self.child.poll ()
				if exitcode is None:
					self.set_backoff (True)
					continue

				if exitcode == 0:
					self.logger ("[info] instance has been shutdowned cleanly")
					break
				elif exitcode == 3:
					self.logger ("[info] try re-starting up instance")
					self.create ()
				else:
					self.set_backoff ()
					if time.time() - self.backoff_start_time >= self.backoff_interval:
						self.logger ("[fail] instance encountered unexpected error and terminated, try re-starting up (current backoff interval is %d)" % self.backoff_interval)
						self.backoff_interval = self.backoff_interval * 2
						if self.backoff_interval > self.BACKOFF_MAX_INTERVAL:
							self.backoff_interval = self.BACKOFF_MAX_INTERVAL
						self.create ()
				time.sleep (3)
		except KeyboardInterrupt:
			pass

if __name__ == "__main__":
	service = Service ()
	service.run ()
Python
0.000015
@@ -679,18 +679,16 @@ (False)%0A -%09%09 %0A%09%09self. @@ -1436,16 +1436,73 @@ kill')%0A%09 +%0A%09def hTERM (self, signum, frame):%09%09%09%0A%09%09self.shutdown ()%0A %09%09%0A%09def @@ -1570,32 +1570,29 @@ BREAK, self. -shutdown +hTERM )%0A%09%09else:%0A%09%09 @@ -1631,30 +1631,17 @@ elf. -shutdown)%0A%09%09%09%0A%09%09try:%0A%09 +hTERM)%0A%09%0A %09%09tr @@ -1642,25 +1642,24 @@ %09%0A%09%09try:%0A%09%09%09 -%09 self.start ( @@ -1666,69 +1666,72 @@ )%0A%09%09 -%09 except -:%0A%09%09%09%09self.logger.trace ()%0A%09%09finally:%0A%09%09%09self.shutdown + KeyboardInterrupt:%0A%09%09%09pass%0A%09%09except:%0A%09%09%09self.logger.trace ()%0A
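Decoded from the tab-indented hunks, the delta adds a dedicated signal handler and flattens the nested try so Ctrl-C exits quietly (sketch shown with spaces):

def hTERM(self, signum, frame):
    self.shutdown()

def run(self):
    if os.name == "nt":
        signal.signal(signal.SIGBREAK, self.hTERM)
    else:
        signal.signal(signal.SIGTERM, self.hTERM)

    try:
        self.start()
    except KeyboardInterrupt:
        pass
    except:
        self.logger.trace()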
8fbdced7a4c8ea61116e8c978e420c30b8a1f1dc
update for urls.py
slothauth/urls.py
slothauth/urls.py
from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter

from .views import change_email, login, logout, password_reset, profile, signup, passwordless_signup,\
    passwordless_login, AccountViewSet, AuthViewSet

from . import settings


router = DefaultRouter()
router.register(r'accounts', AccountViewSet)
router.register(r'accounts/auth', AuthViewSet)

urlpatterns = [
    url(r'^api/' + settings.API_VERSION + '/', include(router.urls)),
    url(r'^signup/?', signup, name='signup'),
    url(r'^login/?', login, name='login'),
    url(r'^password_reset/?', password_reset, name='password_reset'),
    url(r'^change_email/?', change_email, name='change_email'),
    url(r'^profile/?', profile, name='profile'),
    url(r'^logout/?', logout, name='logout'),
    url(r'^passwordless_signup/?', passwordless_signup, name='passwordless_signup'),
    url(r'^passwordless_login/?', passwordless_login, name='passwordless_login'),

    #(r'^password-reset-done/$', 'django.contrib.auth.views.password_reset_complete'),
    #(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm',
    # {'post_reset_redirect' : '/password-reset-done/'}),
]
Python
0
@@ -484,16 +484,103 @@ .urls)), + # TODO makes sense to have a settings.API_BASE_URL rather than a settings.API_VERSION? %0A url @@ -1324,8 +1324,65 @@ /'%7D),%0A%5D%0A +%0A# TODO create setting for turning on and off debug urls%0A
ac765968a9a83685c28244200958164d2a0fc81e
fix typo
smartmin/email.py
smartmin/email.py
from django.conf import settings
from django.template import Context
from django.utils.module_loading import import_string


def link_components(request, user=None):
    protocol = 'https' if request.is_secure() else 'http'
    hostname = getattr(settings, 'HOSTNAME', request.get_host())

    return {"protocol": protocol, "hostname": hostname}


def build_email_context(request=None, user=None):
    context = Context({'user': user})

    processors = []
    collect = []
    collect.extend(getattr(settings, "EMAIL_CONTEXT_PROCESSORS", ('smartmin.emaile.link_components',)))
    for path in collect:
        func = import_string(path)
        processors.append(func)

    for processor in processors:
        context.update(processor(request, user))

    return context
Python
0.999991
@@ -576,17 +576,16 @@ in.email -e .link_co
091735fce650d6326e73ca6fb224a77ae68bb601
Add data written to message
salaryzenaggr/manager.py
salaryzenaggr/manager.py
# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from datetime import datetime

from stevedore import extension

from salaryzenaggr.formatters import json_formatter

_fetchers = extension.ExtensionManager(namespace='salaryzenaggr.fetchers',
                                       invoke_on_load=True)


def _get_fetchers(banks, currencies):
    for ext in _fetchers.extensions:
        fetcher = ext.obj
        if (any([bank in fetcher.get_supported_banks() for bank in banks]) and
                any([curr in fetcher.get_supported_currencies() for curr in currencies])):
            yield fetcher


def aggregate_rates(banks, currencies, from_date, result_file, debug):
    res = {}
    for fetcher in _get_fetchers(banks, currencies):
        fetcher.fetch_data(res, currencies, from_date)

    formatter = json_formatter.JsonPrettyFormatter if debug else json_formatter.JsonFormatter
    output = formatter().format_data(res)
    if debug:
        print output

    print "New data aggregated at %s UTC" % datetime.utcnow()
    if result_file:
        result_file.write(output)
        result_file.close()
Python
0.000007
@@ -1590,8 +1590,71 @@ close()%0A + print %22Data successfully written to %25s%22 %25 result_file%0A%0A
1dd8ce20632d8a6b857a06136b89ac11b27c8f07
Update documentation
letters/models.py
letters/models.py
from django.db import models
from common.models import Citation


class Letter(models.Model):
    """A letter from one of Pliny's books of personal correspondence"""
    book = models.PositiveSmallIntegerField()
    manuscript_correspondent_name = models.CharField(blank=True, max_length=255)
    letter = models.PositiveSmallIntegerField()
    topics = models.ManyToManyField('Topic', blank=True)
    date = models.PositiveSmallIntegerField(blank=True, null=True)
    citations = models.ManyToManyField(Citation, blank=True)

    class Meta:
        unique_together = ('book', 'letter')
        ordering = ['book', 'letter']

    def __str__(self):
        return "%s.%s" % (self.book, self.letter)


class Topic(models.Model):
    """A topic for one of Pliny's letters"""
    name = models.CharField(max_length=255)

    def __str__(self):
        return self.name
Python
0
@@ -155,16 +155,452 @@ pondence +%0A Attributes:%0A book (PositiveSmallIntegerField): book number.%0A manuscript_correspondent_name (CharField): override if manuscript%0A correspondent as written differs from database name%0A letter (PositiveSmallIntegerField): letter number%0A date (PositiveSmallIntegerField): Year of letter if known.%0A citations (ManyToManyField): Citations related to the letter%0A %22%22%22%0A
e7f5efafbdbd674adfb60bbadac6665860dd23a0
Update __main__.py
snake/__main__.py
snake/__main__.py
import argparse
import sys

from .assembler import Assembler
from .vm import System


def assembler():
    parser = argparse.ArgumentParser(description='A 2 pass assembler.',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Take action depending on whether or not this is being pipelined
    if sys.stdin.isatty():
        parser.add_argument("file", help="file to be assembled.")
        parser.add_argument('-o','--outfile', help='output file', default=None, required=False)
        args = parser.parse_args()
        try:
            with open(args.file, 'r') as f:
                a = Assembler(f)
                a.assemble()
                output_records = a.generated_records
        except IOError:
            print("[IO Error]: The source file could not be opened.")
        else:
            try:
                if args.outfile is None:
                    for record in output_records:
                        print(record)
                else:
                    with open(args.outfile, 'w') as w:
                        for record in output_records:
                            w.write(record)
                            w.write('\n')
            except IOError:
                print("[IO Error]: The output file could not be opened.")
    else:
        a = Assembler(sys.stdin)
        try:
            a.assemble()
            output_records = a.generated_records
        except StopIteration:
            print("[IO Error]: The source program could not be read from stdin")
        else:
            for record in output_records:
                print(record)


def vm():
    parser = argparse.ArgumentParser(description='A simple vm.',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Take action depending on whether or not this is being pipelined
    if sys.stdin.isatty():
        parser.add_argument("file", help="file to be assembled.")
        parser.add_argument('-o','--outfile', help='output file', default=None, required=False)
        parser.add_argument('--step', dest='step', help='step through each instruction cycle.', action='store_true')
        parser.set_defaults(step=False)
        args = parser.parse_args()
        try:
            system = System()
            with open(args.file, 'r') as f:
                system.load_file(f)
            system.step = args.step
            system.run()
        except IOError:
            print("[IO Error]: The source file could not be opened.")
        except:
            print "IR: %s\nPC: %s\nOutput: %s\n" % \
                (system.ir, system.pc, system.format_output())
            raise
    else:
        try:
            system = System()
            system.load_file(sys.stdin)
            system.run()
        except StopIteration:
            print("[IO Error]: The source program could not be read from stdin")
        except:
            print "IR: %s\nPC: %s\nOutput: %s\n" % \
                (system.ir, system.pc, system.format_output())
            raise


if __name__ == '__main__':
    assembler()
Python
0.000063
@@ -1920,39 +1920,36 @@ elp=%22file to be -assembl +load ed.%22)%0A pa
3b787bc966bda0e39c928fd0bf46c8e9bf012044
bump up DB pool size and timeout for tests
biweeklybudget/tests/conftest.py
biweeklybudget/tests/conftest.py
""" The latest version of this package is available at: <http://github.com/jantman/biweeklybudget> ################################################################################ Copyright 2016 Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com> This file is part of biweeklybudget, also known as biweeklybudget. biweeklybudget is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. biweeklybudget is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with biweeklybudget. If not, see <http://www.gnu.org/licenses/>. The Copyright and Authors attributions contained herein may not be removed or otherwise altered, except to add the Author attribution of a contributor to this work. (Additional Terms pursuant to Section 7b of the AGPL v3) ################################################################################ While not legally required, I sincerely request that anyone who finds bugs please submit them at <https://github.com/jantman/biweeklybudget> or to me via email, and that you send any contributions or improvements either as a pull request on GitHub, or to me via email. ################################################################################ AUTHORS: Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com> ################################################################################ """ import pytest import os import logging from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, scoped_session import biweeklybudget.settings import socket from biweeklybudget.tests.fixtures.sampledata import SampleDataLoader try: from pytest_flask.fixtures import LiveServer except ImportError: pass connstr = os.environ.get('DB_CONNSTRING', None) if connstr is None: connstr = 'mysql+pymysql://budgetTester:jew8fu0ue@127.0.0.1:3306/' \ 'budgettest?charset=utf8mb4' os.environ['DB_CONNSTRING'] = connstr biweeklybudget.settings.DB_CONNSTRING = connstr import biweeklybudget.db # noqa import biweeklybudget.models.base # noqa from biweeklybudget.flaskapp.app import app # noqa engine = create_engine( connstr, convert_unicode=True, echo=False, connect_args={'sql_mode': 'STRICT_ALL_TABLES'} ) logger = logging.getLogger(__name__) # suppress webdriver DEBUG logging selenium_log = logging.getLogger("selenium") selenium_log.setLevel(logging.INFO) selenium_log.propagate = True @pytest.fixture(scope="session") def refreshdb(): """ Refresh/Load DB data before tests """ # setup the connection conn = engine.connect() if 'NO_REFRESH_DB' not in os.environ: logger.info('Refreshing DB (session-scoped)') # clean the database biweeklybudget.models.base.Base.metadata.reflect(engine) biweeklybudget.models.base.Base.metadata.drop_all(engine) biweeklybudget.models.base.Base.metadata.create_all(engine) # load the sample data data_sess = scoped_session( sessionmaker(autocommit=False, autoflush=False, bind=conn) ) SampleDataLoader(data_sess).load() data_sess.flush() data_sess.commit() data_sess.close() else: logger.info('Skipping session-scoped DB refresh') # create a session to use for the tests sess = scoped_session( sessionmaker(autocommit=False, bind=conn) 
) # yield the session yield(sess) # when we're done, close sess.close() conn.close() @pytest.fixture(scope="class") def class_refresh_db(): """ This fixture rolls the DB back to the previous state when the class is finished; to be used on classes that alter data. Use like: @pytest.mark.usefixtures('class_refresh_db', 'testdb') class MyClass(AcceptanceHelper): """ logger.info('Connecting to DB (class-scoped)') # setup the connection conn = engine.connect() sess = sessionmaker(autocommit=False, bind=conn)() # yield the session yield(sess) sess.close() logger.info('Refreshing DB (class-scoped)') # clean the database biweeklybudget.models.base.Base.metadata.reflect(engine) biweeklybudget.models.base.Base.metadata.drop_all(engine) biweeklybudget.models.base.Base.metadata.create_all(engine) # load the sample data data_sess = scoped_session( sessionmaker(autocommit=False, autoflush=False, bind=conn) ) SampleDataLoader(data_sess).load() data_sess.flush() data_sess.commit() data_sess.close() # when we're done, close conn.close() @pytest.fixture def testdb(): """ DB fixture to be used in tests """ # setup the connection conn = engine.connect() sess = sessionmaker(autocommit=False, bind=conn)() # yield the session yield(sess) sess.close() @pytest.fixture(scope="session") def testflask(): """ This is a version of pytest-flask's live_server fixture, modified for session use. """ # Bind to an open port s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('', 0)) port = s.getsockname()[1] s.close() server = LiveServer(app, port) server.start() yield(server) server.stop() @pytest.fixture(scope="session") def base_url(testflask): """ Simple fixture to return ``testflask`` base URL """ return testflask.url() @pytest.fixture def selenium(selenium): """ Per pytest-selenium docs, use this to override the selenium fixture to provide global common setup. """ selenium.set_window_size(1200, 800) selenium.implicitly_wait(2) # from http://stackoverflow.com/a/13853684/211734 selenium.set_script_timeout(60) # from http://stackoverflow.com/a/17536547/211734 selenium.set_page_load_timeout(60) return selenium
Python
0
@@ -2678,16 +2678,52 @@ TABLES'%7D +,%0A pool_size=10, pool_timeout=120 %0A)%0A%0Alogg
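Decoded, the delta appends two pool arguments to create_engine, giving the test suite more pooled connections and a longer checkout timeout:

engine = create_engine(
    connstr, convert_unicode=True, echo=False,
    connect_args={'sql_mode': 'STRICT_ALL_TABLES'},
    pool_size=10, pool_timeout=120
)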
c1be270c96ef07faa7ceecf4117890ce06af65a8
Update setup.py
wrappers/python/setup.py
wrappers/python/setup.py
from setuptools import setup, find_packages
from setuptools.dist import Distribution

# _version.py should be generated by running find_librs_version.py and copied to pyrealsense2 folder
from pyrealsense2._version import __version__

import os
import io

package_name = "pyrealsense2"
package_data = {}

print("version = ", __version__)


def load_readme():
    with io.open('README.rst', encoding="utf-8") as f:
        return f.read()


if os.name == 'posix':
    package_data[package_name] = ['*.so']
else:
    package_data[package_name] = ['*.pyd', '*.dll']


# This creates a list which is empty but returns a length of 1.
# Should make the wheel a binary distribution and platlib compliant.
class EmptyListWithLength(list):
    def __len__(self):
        return 1


setup(
    name=package_name,
    version=__version__,
    author='Intel(R) RealSense(TM)',
    author_email='realsense@intel.com',
    url='https://github.com/IntelRealSense/librealsense',
    scripts=['examples/align-depth2color.py',
             'examples/export_ply_example.py',
             'examples/opencv_viewer_example.py',
             'examples/python-rs400-advanced-mode-example.py',
             'examples/python-tutorial-1-depth.py'
             ],
    license='Apache License, Version 2.0',
    description='Python Wrapper for Intel Realsense SDK 2.0.',
    long_description=load_readme(),
    install_requires=[],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: MacOS',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Multimedia :: Video',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Human Machine Interfaces',
        'Topic :: Scientific/Engineering :: Image Recognition',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries :: Application Frameworks'
    ],
    packages=find_packages(exclude=['third_party', 'docs', 'examples']),
    include_package_data=True,
    ext_modules=EmptyListWithLength(),
    package_data=package_data
)
Python
0.000001
@@ -1682,45 +1682,8 @@ e',%0A - 'Operating System :: MacOS',%0A
26af472f187de01f6e0d8d609f2d857e6557c984
allow case insensitive menu in top url only
mapannotations/urls.py
mapannotations/urls.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>,
#
# Version: 1.0

from mapannotations import views
from django.conf.urls import url, patterns

from django.core.urlresolvers import reverse
from django.utils.functional import lazy
from django.views.generic import RedirectView
from django.views.decorators.cache import never_cache

from map_settings import map_settings

reverse_lazy = lazy(reverse, str)

# concatenate aliases to use in url regex
MENU_MAPPER_REGEX = "(%s)" % ("|".join(map_settings.MENU_MAPPER))
DEFAULT_MENU = map_settings.MENU_MAPPER.iterkeys().next()

urlpatterns = patterns('',)

# alias
for m in map_settings.MENU_MAPPER:
    urlpatterns += (
        url(r'^%s/$' % m, views.index, {'menu': m}, name="mapindex_%s" % m),
    )

urlpatterns += (
    # core
    url(r'^$', never_cache(
        RedirectView.as_view(
            url=reverse_lazy('mapindex_%s' % DEFAULT_MENU),
            permanent=True, query_string=True)),
        name="mapindex"),

    url(r'^api/experimenters/(?P<menu>%s)/'
        r'(?P<experimenter_id>([-1]|[0-9])+)/$' % MENU_MAPPER_REGEX,
        views.api_experimenter_detail,
        name='mapannotations_api_experimenter'),

    url(r'^api/mapannotations/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
        views.api_mapannotation_list,
        name='mapannotations_api_mapannotations'),

    url(r'^api/plates/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
        views.api_plate_list,
        name='mapannotations_api_plates'),

    url(r'^api/images/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
        views.api_image_list,
        name='mapannotations_api_images'),

    url(r'^api/paths_to_object/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
        views.api_paths_to_object,
        name='mapannotations_api_paths_to_object'),

    # TODO: c_id takes namedValue.name as an attribute, make sure regex match
    url(r'^metadata_details/(?P<c_type>%s)/'
        r'(?P<c_id>(.*))/$' % MENU_MAPPER_REGEX,
        views.load_metadata_details,
        name="load_metadata_details"),

    # autocomplete
    url(r'^autocomplete/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
        views.mapannotations_autocomplete,
        name='mapannotations_autocomplete'),
)
Python
0
@@ -1449,16 +1449,20 @@ url(r'%5E +(?i) %25s/$' %25
60ccc393fc428d609e772b838c5a4c4002e3ec49
Add sendError method
snp/SNProtocol.py
snp/SNProtocol.py
from twisted.internet import defer
from twisted.protocols.basic import NetstringReceiver

import json


class SNError(Exception):
    def __init__(self, *args, **kwargs):
        Exception.__init__(self, args, kwargs)
        self.code = args[1]
        self.request = args[2]


class SNProtocol(NetstringReceiver):
    id_counter = 0

    def stringReceived(self, string):
        packet = json.loads(string)
        if "reqid" in packet:
            if len(packet["reqid"]) > 2:
                type = packet["reqid"][:2]
                reqid = packet["reqid"][2:]
                if type == "RQ":
                    self.factory.service.hadleRequest(packet, reqid, self)
                elif type == "RE":
                    # Pending deferreds live on the factory's service
                    # (see createDeferred below).
                    if reqid in self.factory.service.requests:
                        self.factory.service.requests[reqid].callback(packet)
                        self.factory.service.requests.pop(reqid)

    def sendRequest(self, request):
        reqid = str(self.id_counter)
        request["reqid"] = "RQ{0}".format(reqid)
        self._sendPacket(request)
        d = self.createDeferred(reqid)
        self.id_counter += 1
        return d

    def sendResponse(self, request, reqid):
        request["reqid"] = "RE{0}".format(str(reqid))
        self._sendPacket(request)

    def _sendPacket(self, request):
        json_str = json.dumps(request)
        self.sendString(json_str)

    def connectionMade(self):
        self.factory.service.connectionMade(self)

    def createDeferred(self, reqid):
        d = defer.Deferred()
        d.addCallback(self.errorChecker)
        self.factory.service.requests[reqid] = d
        return d

    def errorChecker(self, packet):
        if "Error" in packet:
            raise SNError("", int(packet["Error"]), packet["Request"])
        return packet
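# Illustrative sketch (hypothetical packet values): the framing round trip.
# sendRequest tags outgoing packets "RQ<id>"; the peer replies with the same
# id tagged "RE<id>", which stringReceived splits back into (tag, id) to fire
# the matching Deferred.
if __name__ == "__main__":
    outgoing = {"Action": "ping"}
    outgoing["reqid"] = "RQ7"              # what sendRequest adds
    wire = json.dumps(outgoing)            # then framed as a netstring
    assert json.loads(wire)["reqid"] == "RQ7"
    incoming = {"reqid": "RE7", "Result": "pong"}
    tag, reqid = incoming["reqid"][:2], incoming["reqid"][2:]
    assert (tag, reqid) == ("RE", "7")     # callback on requests["7"]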
Python
0.000001
@@ -1253,16 +1253,133 @@ quest)%0A%0A + def sendError(self, code, request):%0A r = %7B%22Error%22: code, %22Request%22: request%7D%0A self._sendPacket(r)%0A%0A def
b223865ded88b5467c1088abbf628048e39e564c
Test str_cat() schema; use fixtures for exception tests
blaze/expr/tests/test_strings.py
blaze/expr/tests/test_strings.py
import pytest

from datashape import dshape

from blaze import symbol

dshapes = ['var * {name: string}',
           'var * {name: ?string}',
           'var * string',
           'var * ?string',
           'string']


@pytest.mark.parametrize('ds', dshapes)
def test_like(ds):
    t = symbol('t', ds)
    expr = getattr(t, 'name', t).like('Alice*')
    assert expr.pattern == 'Alice*'
    assert expr.schema.measure == dshape(
        '%sbool' % ('?' if '?' in ds else '')
    ).measure


@pytest.mark.parametrize('ds', dshapes)
def test_str_upper_schema(ds):
    t = symbol('t', ds)
    expr_upper = getattr(t, 'name', t).str_upper()
    expr_lower = getattr(t, 'name', t).str_upper()
    assert (expr_upper.schema.measure ==
            expr_lower.schema.measure ==
            dshape('%sstring' % ('?' if '?' in ds else '')).measure)


class TestStrCatExceptions():
    ds = dshape('3 * {name: string[10], comment: string[25], num: int32}')
    s = symbol('s', dshape=ds)

    def test_str_cat_exception_non_string_sep(self):
        with pytest.raises(TypeError):
            self.s.name.str_cat(self.s.comment, sep=123)

    def test_str_cat_exception_non_string_col_to_cat(self):
        with pytest.raises(TypeError):
            self.s.name.str_cat(self.s.num)
Python
0
@@ -211,16 +211,538 @@ ring'%5D%0A%0A +lhsrhs_ds = %5B'var * %7Bname: string, comment: string%5B25%5D%7D',%0A 'var * %7Bname: string%5B10%5D, comment: string%7D',%0A 'var * %7Bname: string, comment: string%7D',%0A 'var * %7Bname: ?string, comment: string%7D',%0A 'var * %7Bname: string, comment: ?string%7D'%5D%0A%0A%0A@pytest.fixture(scope='module')%0Adef strcat_sym():%0A '''%0A blaze symbol used to test exceptions raised by str_cat()%0A '''%0A ds = dshape('3 * %7Bname: string, comment: string, num: int32%7D')%0A s = symbol('s', dshape=ds)%0A return s%0A%0A%0A @pytest. @@ -1358,149 +1358,242 @@ )%0A%0A%0A -class TestStrCatExceptions( +@pytest.mark.parametrize('ds', lhsrhs_ds)%0Adef test_str_schema(ds ):%0A -ds = dshape('3 * %7B +t = symbol('t', ds)%0A expr = t. name -: string%5B10%5D, comment: string%5B25%5D, num: int32%7D')%0A s = symbol('s', dshape=ds)%0A%0A +.str_cat(t.comment)%0A assert (expr.schema.measure ==%0A dshape('%25sstring' %25 ('?' if '?' in ds else '')).measure)%0A%0A%0A def @@ -1631,26 +1631,28 @@ ng_sep(s -elf):%0A +trcat_sym):%0A with @@ -1682,34 +1682,34 @@ r):%0A - self.s +strcat_sym .name.str_ca @@ -1711,21 +1711,25 @@ tr_cat(s -elf.s +trcat_sym .comment @@ -1740,20 +1740,17 @@ p=123)%0A%0A - +%0A def test @@ -1795,18 +1795,20 @@ at(s -elf):%0A +trcat_sym):%0A @@ -1850,18 +1850,18 @@ - self.s +strcat_sym .nam @@ -1875,13 +1875,17 @@ at(s -elf.s +trcat_sym .num
a357e9bb209914a9e7ae4e391ea21f23b1c1f477
Use a common cluster_config mock for unsubscribe_topics unit tests.
tests/kafka_consumer_manager/test_unsubscribe_topics.py
tests/kafka_consumer_manager/test_unsubscribe_topics.py
import contextlib

import mock
import pytest
from kazoo.exceptions import NoNodeError
from kazoo.exceptions import ZookeeperError

from yelp_kafka_tool.kafka_consumer_manager.commands. \
    unsubscribe_topics import UnsubscribeTopics


@mock.patch('yelp_kafka_tool.kafka_consumer_manager.'
            'commands.unsubscribe_topics.KafkaClient')
class TestUnsubscribeTopics(object):
    topics_partitions = {
        "topic1": [0, 1, 2],
        "topic2": [0, 1],
    }

    @contextlib.contextmanager
    def mock_kafka_info(self):
        with mock.patch.object(
            UnsubscribeTopics,
            "preprocess_args",
            spec=UnsubscribeTopics.preprocess_args,
            return_value=self.topics_partitions,
        ) as mock_writer_process_args, mock.patch(
            'yelp_kafka_tool.kafka_consumer_manager.'
            'commands.unsubscribe_topics.ZK',
            autospec=True
        ) as mock_ZK:
            mock_ZK.return_value.__enter__.return_value = mock_ZK.return_value
            yield mock_writer_process_args, mock_ZK

    def test_run_some_partitions_left(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            args = mock.Mock(
                groupid="some_group",
                topic="topic1",
                partitions=[0, 1, 2]
            )
            cluster_config = mock.Mock(zookeeper='some_ip')
            mock_ZK.return_value.get_my_subscribed_partitions.return_value = [3]

            UnsubscribeTopics.run(args, cluster_config)

            calls = [
                mock.call(
                    args.groupid,
                    "topic1",
                    [0, 1, 2]
                ),
            ]
            obj = mock_ZK.return_value
            assert obj.delete_topic_partitions.call_args_list == calls
            # Delete topic should not be called because the group is still
            # subscribed to some topic partitions
            assert not obj.delete_topic.called

    def test_run_wipe_all_partitions(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            args = mock.Mock(
                groupid="some_group",
                topic="topic1",
                partitions=[0, 1, 2]
            )
            mock_ZK.return_value.get_my_subscribed_partitions.return_value = []
            cluster_config = mock.Mock(zookeeper='some_ip')

            UnsubscribeTopics.run(args, cluster_config)

            calls = [
                mock.call(
                    args.groupid,
                    "topic1",
                    [0, 1, 2]
                ),
            ]
            obj = mock_ZK.return_value
            assert obj.delete_topic_partitions.call_args_list == calls
            assert obj.delete_topic.call_args_list == [
                mock.call(args.groupid, "topic1"),
            ]

    def test_run_wipe_default_partitions(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            args = mock.Mock(
                groupid="some_group",
                topic="topic1",
                partitions=None
            )
            cluster_config = mock.Mock(zookeeper='some_ip')

            UnsubscribeTopics.run(args, cluster_config)

            obj = mock_ZK.return_value
            assert obj.delete_topic_partitions.call_count == 0
            assert obj.delete_topic.call_args_list == [
                mock.call(args.groupid, "topic1"),
            ]

    def test_run_wipe_default_topics(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            args = mock.Mock(
                groupid="some_group",
                topic=None,
                partitions=None
            )
            cluster_config = mock.Mock(zookeeper='some_ip')

            UnsubscribeTopics.run(args, cluster_config)

            obj = mock_ZK.return_value
            assert obj.delete_topic_partitions.call_count == 0
            assert sorted(obj.delete_topic.call_args_list) == sorted(
                [
                    mock.call(args.groupid, "topic1"),
                    mock.call(args.groupid, "topic2"),
                ],
            )

    def test_run_no_node_error(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            obj = mock_ZK.return_value
            obj.delete_topic_partitions.side_effect = NoNodeError("Boom!")
            args = mock.Mock(
                groupid="some_group",
                topic="topic1",
                partitions=[0, 1, 2]
            )
            cluster_config = mock.Mock(zookeeper='some_ip')

            UnsubscribeTopics.run(args, cluster_config)

            assert mock_ZK.return_value.delete_topic_partitions.called

    def test_run_any_other_exception(self, mock_client):
        with self.mock_kafka_info() as (mock_writer_process_args, mock_ZK):
            obj = mock_ZK.return_value.__enter__.return_value
            obj.__exit__.return_value = False
            obj.delete_topic_partitions.side_effect = ZookeeperError("Boom!")
            args = mock.Mock(
                groupid="some_group",
                topic="topic1",
                partitions=[0, 1, 2]
            )
            cluster_config = mock.Mock(zookeeper='some_ip')

            with pytest.raises(ZookeeperError):
                UnsubscribeTopics.run(args, cluster_config)
Python
0
@@ -464,16 +464,69 @@ %0A %7D%0A%0A + cluster_config = mock.Mock(zookeeper='some_ip')%0A%0A @con @@ -1393,68 +1393,8 @@ )%0A - cluster_config = mock.Mock(zookeeper='some_ip')%0A @@ -1503,32 +1503,37 @@ opics.run(args, +self. cluster_config)%0A @@ -2360,68 +2360,8 @@ = %5B%5D -%0A cluster_config = mock.Mock(zookeeper='some_ip') %0A%0A @@ -2390,32 +2390,37 @@ opics.run(args, +self. cluster_config)%0A @@ -3115,68 +3115,8 @@ ) -%0A cluster_config = mock.Mock(zookeeper='some_ip') %0A%0A @@ -3145,32 +3145,37 @@ opics.run(args, +self. cluster_config)%0A @@ -3677,68 +3677,8 @@ ) -%0A cluster_config = mock.Mock(zookeeper='some_ip') %0A%0A @@ -3707,32 +3707,37 @@ opics.run(args, +self. cluster_config)%0A @@ -4466,68 +4466,8 @@ ) -%0A cluster_config = mock.Mock(zookeeper='some_ip') %0A%0A @@ -4496,32 +4496,37 @@ opics.run(args, +self. cluster_config)%0A @@ -5070,68 +5070,8 @@ ) -%0A cluster_config = mock.Mock(zookeeper='some_ip') %0A%0A @@ -5160,16 +5160,21 @@ n(args, +self. cluster_
20c0f26fa6d09fdb3a2177779e2495804931fccb
fix branch on core
master/master/steps.py
master/master/steps.py
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
from functools import partial

from buildbot.process.factory import BuildFactory
from buildbot.steps.source.git import Git
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.slave import RemoveDirectory
from buildbot.steps.shell import ShellCommand
from buildbot.process.properties import Property

CREATE_ENVIRONMENT = """
from __future__ import print_function

import os
import json
import tempfile
import subprocess

tempdir = tempfile.mkdtemp()
virtualenv_root = os.path.join(tempdir, "virtualenv")
short_pyversion = os.environ["PYTHON_VERSION"].replace(".", "")

# Windows requires the site-packages directory so we can access things
# like pywin32
if os.name == "nt":
    mkvirtualenv = [
        "C:\\Python%s\\Scripts\\virtualenv.exe" % short_pyversion,
        virtualenv_root, "--system-site-packages", "--quiet"]
    python_bin_name = "python.exe"
    pip_bin_name = "pip.exe"
    bin_dir = "Scripts"
    nose_bin_name = "nosetests.exe"
else:
    mkvirtualenv = [
        "virtualenv-%s" % os.environ["PYTHON_VERSION"],
        virtualenv_root, "--quiet"]
    python_bin_name = "python"
    pip_bin_name = "pip"
    bin_dir = "bin"
    nose_bin_name = "nosetests"

# Create the virtualenv
subprocess.check_call(mkvirtualenv)

print(json.dumps(
    {"virtualenv": virtualenv_root,
     "tempdir": tempdir,
     "python": os.path.join(virtualenv_root, bin_dir, python_bin_name),
     "pip": os.path.join(virtualenv_root, bin_dir, pip_bin_name),
     "nosetests": os.path.join(virtualenv_root, bin_dir, nose_bin_name)}))
""".strip()

CREATE_REQUIREMENTS = """
""".strip()

REPO_URL = "https://github.com/pyfarm/pyfarm-{project}"
Clone = partial(Git, clobberOnFailure=True, progress=True, mode='full')


class CreateEnvironment(SetPropertyFromCommand):
    def __init__(self, *args, **kwargs):
        kwargs.setdefault("extract_fn", self._extract_fn)
        SetPropertyFromCommand.__init__(self, *args, **kwargs)

    def _extract_fn(self, _, stdout, stderr):
        data = json.loads(stdout)
        for key, value in data.items():
            if isinstance(value, unicode):
                data[key] = str(value)
        return data


def clone_steps(project):
    projects = ["core"]
    if project == "core":
        steps = [
            Clone(REPO_URL.format(project="core"), workdir="core")]

    if project != "core":
        steps = [
            Clone(REPO_URL.format(project="core"), workdir="core",
                  branch="master")]
        projects.append(project)
        steps.append(
            Clone(REPO_URL.format(project=project), workdir=project,
                  name="clone %s" % project))

    return projects, steps


def get_build_factory(project, platform, pyversion, dbtype):
    factory = BuildFactory()

    pip_download_cache = "pip_cache"
    if platform == "linux":
        pip_download_cache = "/home/buildbot/pip_cache"
    if platform == "win":
        pip_download_cache = "C:\\Users\\buildbot\\pip_cache"
    if platform == "mac":
        pip_download_cache = "/Users/buildbot/pip_cache"

    # Git
    project_dirs, git_steps = clone_steps(project)
    factory.addSteps(git_steps)

    # Create the virtual environment
    factory.addStep(
        CreateEnvironment(
            name="create environment",
            command=["python%s" % pyversion, "-c", CREATE_ENVIRONMENT]))

    # Install 'core'
    factory.addStep(
        ShellCommand(
            name="install pyfarm.core", workdir="core",
            env={"PIP_DOWNLOAD_CACHE": pip_download_cache},
            command=[Property("pip"), "install", "-e", ".", "--egg"]))

    # Install this package
    if project != "core":
        factory.addStep(
            ShellCommand(
                name="install pyfarm.%s" % project, workdir=project,
                env={"PIP_DOWNLOAD_CACHE": pip_download_cache},
                command=[Property("pip"), "install", "-e", ".", "--egg"]))

    # Install test packages
    requirements = ["nose"]
    env = {}
    if not pyversion.startswith("3."):
        requirements.append("mock")

    if pyversion == "2.6":
        requirements.append("unittest2")

    db_name = "pyfarm_unittest_%s_%s" % (platform, pyversion.replace(".", ""))
    print "testdb: %s" % db_name
    if dbtype == "mysql":
        requirements.append("mysql-connector-python")
        env.update(
            DATABASE_NAME=db_name,
            PYFARM_DATABASE_URI=
            "mysql+mysqlconnector://buildbot:42e203517fe6eafda2bfa96580c4973f9cc265b50afebef2@127.0.0.1/%s" % db_name)

    if dbtype == "postgres":
        requirements.append("psycopg2")
        env.update(
            DATABASE_NAME=db_name,
            PYFARM_DATABASE_URI=
            "postgresql+psycopg2://buildbot:42e203517fe6eafda2bfa96580c4973f9cc265b50afebef2@127.0.0.1/%s" % db_name)

    factory.addStep(
        ShellCommand(
            name="install additional packages",
            env={"PIP_DOWNLOAD_CACHE": pip_download_cache},
            command=[
                Property("pip"), "install",
                "--allow-external", "mysql-connector-python"] + requirements))

    if project in ("core", "master"):
        # TODO: configure db
        factory.addStep(
            ShellCommand(
                name="run tests", workdir=project, env=env,
                command=[Property("nosetests"), "tests", "-s", "--verbose"]))

    # Destroy the virtualenv
    factory.addStep(
        RemoveDirectory(
            Property("tempdir"),
            flunkOnFailure=False, haltOnFailure=False))

    return factory
Python
0
@@ -2969,36 +2969,20 @@ )%5D%0A%0A -if project != %22core%22 +else :%0A @@ -3009,96 +3009,221 @@ -Clone(REPO_URL.format(project=%22core%22), workdir=%22core%22,%0A branch=%22master%22 +RemoveDirectory(%22core%22),%0A ShellCommand(%0A workdir=%22core%22,%0A command=%5B%0A %22git%22, %22clone%22, %22https://github.com/pyfarm/pyfarm-core%22,%0A %22core%22%5D )%5D%0A%0A
7e648b7155836163da6abbfdb31f52d16204fe45
fix bokeh app init
bokeh/application/application.py
bokeh/application/application.py
''' '''
from __future__ import absolute_import

import logging
log = logging.getLogger(__name__)

from tornado import gen

from abc import ABCMeta, abstractmethod

from ..util.future import with_metaclass
from ..util.tornado import yield_for_all_futures

from ..document import Document


class ServerContext(with_metaclass(ABCMeta)):

    @property
    @abstractmethod
    def sessions(self):
        """ SessionContext instances belonging to this application."""
        raise NotImplementedError("sessions property, should return SessionContext")

    @property
    @abstractmethod
    def develop_mode(self):
        """ True if we are in develop mode."""
        raise NotImplementedError("develop_mode")

    @abstractmethod
    def add_next_tick_callback(self, callback):
        """ Adds a callback to be run on the next tick of the event loop."""
        raise NotImplementedError("add_next_tick_callback")

    @abstractmethod
    def remove_next_tick_callback(self, callback):
        """ Removes a callback added with add_next_tick_callback, before it runs."""
        raise NotImplementedError("remove_next_tick_callback")

    @abstractmethod
    def add_timeout_callback(self, callback, timeout_milliseconds):
        """ Adds a callback to be run once after timeout_milliseconds."""
        raise NotImplementedError("add_timeout_callback")

    @abstractmethod
    def remove_timeout_callback(self, callback):
        """ Removes a callback added with add_timeout_callback, before it runs."""
        raise NotImplementedError("remove_timeout_callback")

    @abstractmethod
    def add_periodic_callback(self, callback, period_milliseconds):
        """ Adds a callback to be run every period_milliseconds until it is removed."""
        raise NotImplementedError("add_periodic_callback")

    @abstractmethod
    def remove_periodic_callback(self, callback):
        """ Removes a callback added with add_periodic_callback."""
        raise NotImplementedError("remove_periodic_callback")


class SessionContext(with_metaclass(ABCMeta)):
    def __init__(self, server_context, session_id):
        self._server_context = server_context
        self._id = session_id

    @property
    def server_context(self):
        return self._server_context

    @property
    def id(self):
        return self._id

    @property
    @abstractmethod
    def destroyed(self):
        """If True, the session has been discarded and cannot be used.

        A new session with the same ID could be created later but this
        object instance will not come back to life.
        """
        raise NotImplementedError("destroyed")

    @abstractmethod
    def with_locked_document(self, func):
        """ Runs a function with the document lock held, passing the
        document to the function.

        Args:
            func: function that takes a single parameter (the Document)
                and returns None or a Future

        Returns:
            a Future containing the result of the function
        """
        raise NotImplementedError("locked_document")


class Application(object):
    ''' An Application is a factory for Document instances. '''

    def __init__(self, *handlers):
        self._handlers = []
        for h in handlers:
            self.add(h)

    def create_document(self):
        ''' Creates and initializes a document using the Application's handlers.'''
        doc = Document()
        self.initialize_document(doc)
        return doc

    def initialize_document(self, doc):
        ''' Fills in a new document using the Application's handlers. '''
        for h in self._handlers:
            # TODO (havocp) we need to check the 'failed' flag on each handler
            # and build a composite error display. In develop mode, we want to
            # somehow get these errors to the client.
            h.modify_document(doc)
            if h.failed:
                log.error("Error running application handler %r: %s %s ",
                          h, h.error, h.error_detail)
        if self._template is not None:
            doc.template = self._template
        # A future server setting could make it configurable whether to do this,
        # since it has some performance impact probably. Let's see if we need to.
        doc.validate()

    def add(self, handler):
        ''' Add a handler to the pipeline used to initialize new documents.

        Args:
            handler (Handler) : a handler to process this Application
        '''
        self._handlers.append(handler)

        # make sure there is at most one static path
        static_paths = set(h.static_path() for h in self.handlers)
        static_paths.discard(None)
        if len(static_paths) > 1:
            raise RuntimeError("More than one static path requested for app: %r" % list(static_paths))
        elif len(static_paths) == 1:
            self._static_path = static_paths.pop()
        else:
            self._static_path = None

        # make sure there is at most one custom template
        templates = set(h.template() for h in self.handlers)
        templates.discard(None)
        if len(templates) > 1:
            raise RuntimeError("More than one custom template requested for app: %r" % list(templates))
        elif len(templates) == 1:
            self._template = templates.pop()
        else:
            self._template = None

    @property
    def handlers(self):
        return tuple(self._handlers)

    @property
    def static_path(self):
        return self._static_path

    def on_server_loaded(self, server_context):
        """ Invoked after server startup but before any sessions are created."""
        for h in self._handlers:
            h.on_server_loaded(server_context)

    def on_server_unloaded(self, server_context):
        """ Invoked in theory if the server shuts down cleanly, probably not
        invoked most of the time in practice since servers tend to be killed
        by a signal. Invoked before stopping the server's IOLoop."""
        for h in self._handlers:
            h.on_server_unloaded(server_context)

    @gen.coroutine
    def on_session_created(self, session_context):
        """ Invoked when we create a new session, with a blank Document that
        hasn't been filled in yet. May return a Future which will delay
        session creation until the Future completes."""
        for h in self._handlers:
            result = h.on_session_created(session_context)
            yield yield_for_all_futures(result)
        raise gen.Return(None)

    @gen.coroutine
    def on_session_destroyed(self, session_context):
        """ Invoked when we have destroyed a session.
        ``session_context.destroyed`` will be True."""
        for h in self._handlers:
            result = h.on_session_destroyed(session_context)
            yield yield_for_all_futures(result)
        raise gen.Return(None)
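# Illustrative sketch (hypothetical handler, not part of bokeh's API surface):
# the contract Application.add() relies on -- modify_document(),
# static_path(), template(), and the failed/error attributes.
#
#     class EchoHandler(object):
#         failed = False
#         error = error_detail = None
#         def modify_document(self, doc):
#             pass  # a real handler would add roots / callbacks here
#         def static_path(self):
#             return None
#         def template(self):
#             return None
#
#     app = Application(EchoHandler())
#     doc = app.create_document()   # runs the handler, then doc.validate()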
Python
0.000001
@@ -3215,24 +3215,87 @@ *handlers):%0A + self._static_path = None%0A self._template = None%0A self
09fffb062b45e4715c092c0899a6d4f89cf0b4e1
Fix toolbarbox test
bokeh/models/tests/test_tools.py
bokeh/models/tests/test_tools.py
from __future__ import absolute_import

from bokeh.models.layouts import Box
from bokeh.models.tools import Toolbar, ToolbarBox


# TODO (bev) validate entire list of props

def test_Toolbar():
    tb = Toolbar()
    assert tb.active_drag == 'auto'
    assert tb.active_scroll == 'auto'
    assert tb.active_tap == 'auto'


#
# ToolbarBox
#

def test_toolbar_box_is_instance_of_box():
    tb_box = ToolbarBox()
    assert isinstance(tb_box, Box)


def test_toolbar_box_properties():
    tb_box = ToolbarBox()
    assert tb_box.logo == "normal"
    assert tb_box.toolbar_location == "right"
    assert tb_box.tools is None
    assert tb_box.merge_tools is True
Python
0.000001
@@ -612,15 +612,13 @@ ols -is None +== %5B%5D %0A
1f3200b34730bcbe516e7d4f7599e9d991270ade
Update generate strings script to output Google LLC rather than Inc
Scripts/generate_strings.py
Scripts/generate_strings.py
#!/usr/bin/python
"""
Generates a Swift String extension containing static variables for all strings
in the main Localizable.strings file.

This script runs automatically at build time in Xcode as a Run Script phase.
"""

##
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

import datetime
import getopt
import os
import re
import string
import sys

GENERATED_STRINGS_TEMPLATE = string.Template("""/*
 * Copyright ${year} Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT.
 */

import Foundation

extension String {
  ${strings}
}
""")


def process_args(argv):
  """Process the command-line arguments for this script.

  Args:
    argv: The command line arguments.

  Returns:
    A tuple of the input and output files.
  """
  help_info = "generate_strings.py -i <input_file> -o <output_file>"
  try:
    opts, _ = getopt.getopt(argv, "hi:o:", ["ifile=", "ofile="])
  except getopt.GetoptError:
    print help_info
    sys.exit(2)
  for opt, arg in opts:
    if opt == '-h':
      print help_info
      sys.exit()
    elif opt in ("-i", "--ifile"):
      input_file = arg
    elif opt in ("-o", "--ofile"):
      output_file = arg
  return (input_file, output_file)


def process_strings_file(inputfile):
  """Read an input file and parse each line looking for string keys.

  Arguments:
    inputfile: The file to parse for string keys.

  Returns:
    An array of Swift variables that were processed.

  Raises:
    ValueError: If strings are not found.
  """
  swift_variables = []
  with open(inputfile, "r") as read_file:
    for line in read_file:
      line = line.strip()  # Strip whitespace from the line.
      if line == "":
        continue  # If this is a blank line, move to next.
      # If this isn't a comment...
      if re.search(r'/\*.*\*/', line) is None:
        # Keyed line. Looks like "the_key" = "The string"; where there may be
        # a missing semicolon or additional spaces on either side of the
        # = sign. Regex match the key (left side).
        match = re.search(r'\"(.*?)\"\s+=\s+\".*?\";?', line)
        if match:
          # The key.
          text_key = match.group(1)
          # Build a variable version with lowercased first letter and then
          # camel-cased, underscores removed.
          components = text_key.lower().split("_")
          for component in components:
            if component == components[0]:
              # Skip capitalizing the first word.
              variable_name = component
              continue
            # Capitalize the first letter of other words and add it to the var
            # name.
            variable_name += component[0].upper() + component[1:]
          # Create the Swift variable from these values, add it to the list.
          swift_variables.append(
              swift_variable_from_key(text_key, variable_name))
  if not swift_variables:
    raise ValueError("No strings found. Please check the format of your "
                     "strings file.")
  return swift_variables


def swift_variable_from_key(key, variable_name):
  """Generate a Swift static variable line for a string key and variable.

  Args:
    key: The string key for the variable.
    variable_name: The name of the variable.

  Returns:
    A string containing generated Swift code for a string.
  """
  return ("static public var " + variable_name + ": String { return \"" +
          key + "\".localized }")


def generate_strings_file(swift_variables, output_file):
  """Generate a complete Swift strings file from variables and save it.

  Args:
    swift_variables: An array of Swift variables.
    output_file: The file to write to.
  """
  generated = GENERATED_STRINGS_TEMPLATE.substitute({
      "strings" : "\n ".join(swift_variables),
      "year" : datetime.datetime.now().year
  })
  write_file = open(output_file, "w")
  write_file.write(generated)
  write_file.close()


def main(argv):
  """ Check for required i/o files, process strings and generate the file. """
  input_file, output_file = process_args(argv)

  # Check files exist.
  if not os.path.isfile(input_file):
    raise ValueError("Input file not found: " + input_file)
  if not os.path.isfile(output_file):
    raise ValueError("Output file not found: " + output_file)

  # Process the strings file.
  swift_variables = process_strings_file(input_file)

  # Generate the output file and finish.
  generate_strings_file(swift_variables, output_file)
  quit()


if __name__ == "__main__":
  main(sys.argv[1:])
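# Illustrative sketch (hypothetical key; mirrors the loop inside
# process_strings_file above): the key is lowercased, split on underscores,
# and re-joined with every word after the first capitalized.
def _demo_variable_name(text_key):
  components = text_key.lower().split("_")
  variable_name = components[0]
  for component in components[1:]:
    variable_name += component[0].upper() + component[1:]
  return variable_name

# _demo_variable_name("BUTTON_TITLE_SAVE") == "buttonTitleSave", which
# swift_variable_from_key then renders as:
#   static public var buttonTitleSave: String { return "BUTTON_TITLE_SAVE".localized }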
Python
0.000022
@@ -241,35 +241,35 @@ ght 2019 Google -Inc +LLC . All Rights Res @@ -995,11 +995,11 @@ gle -Inc +LLC . Al
96704483b7821f40531fa494181ae4e0214fcb0a
Fix rogue format
tests/task_router/test_task_router_worker_capability.py
tests/task_router/test_task_router_worker_capability.py
import time
import unittest

from twilio import jwt
from twilio.task_router import TaskRouterWorkerCapability


class TaskRouterWorkerCapabilityTest(unittest.TestCase):
    def check_policy(self, method, url, policy):
        self.assertEqual(url, policy['url'])
        self.assertEqual(method, policy['method'])
        self.assertTrue(policy['allow'])
        self.assertEqual({}, policy['query_filter'])
        self.assertEqual({}, policy['post_filter'])

    def check_decoded(self, decoded, account_sid, workspace_sid, channel_id,
                      channel_sid=None):
        self.assertEqual(decoded["iss"], account_sid)
        self.assertEqual(decoded["account_sid"], account_sid)
        self.assertEqual(decoded["workspace_sid"], workspace_sid)
        self.assertEqual(decoded["channel"], channel_id)
        self.assertEqual(decoded["version"], "v1")
        self.assertEqual(decoded["friendly_name"], channel_id)

        if 'worker_sid' in decoded.keys():
            self.assertEqual(decoded['worker_sid'], channel_sid)
        if 'taskqueue_sid' in decoded.keys():
            self.assertEqual(decoded['taskqueue_sid'], channel_sid)

    def setUp(self):
        self.account_sid = "AC123"
        self.auth_token = "foobar"
        self.workspace_sid = "WS456"
        self.worker_sid = "WK789"
        self.capability = TaskRouterWorkerCapability(self.account_sid,
                                                     self.auth_token,
                                                     self.workspace_sid,
                                                     self.worker_sid)

    def test_generate_token(self):
        token = self.capability.generate_token()
        self.assertNotEqual(None, token)

        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        self.check_decoded(decoded, self.account_sid, self.workspace_sid,
                           self.worker_sid, self.worker_sid)

    def test_generate_token_with_default_ttl(self):
        token = self.capability.generate_token()
        self.assertNotEqual(None, token)

        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        self.assertEqual(int(time.time()) + 3600, decoded["exp"])

    def test_generate_token_with_custom_ttl(self):
        ttl = 10000

        token = self.capability.generate_token(ttl)
        self.assertNotEqual(None, token)

        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        self.assertEqual(int(time.time()) + 10000, decoded["exp"])

    def test_defaults(self):
        token = self.capability.generate_token()
        self.assertNotEqual(None, token)

        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        websocket_url = 'https://event-bridge.twilio.com/v1/wschannels/{}/{}'.format(
            self.account_sid, self.worker_sid)

        # expect 5 policies
        policies = decoded['policies']
        self.assertEqual(len(policies), 5)

        # should expect 5 policies
        for method, url, policy in [
                ('GET', websocket_url, policies[0]),
                ('POST', websocket_url, policies[1]),
                ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Workers/WK789", policies[2]),
                ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Tasks/**", policies[3]),
                ('GET', "https://taskrouter.twilio.com/v1/Workspaces/WS456/Activities", policies[4])
        ]:
            yield self.check_policy, method, url, policy

    def test_allow_activity_updates(self):
        # allow activity updates to the worker
        self.capability.allow_activity_updates()

        token = self.capability.generate_token()
        self.assertNotEqual(None, token)
        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        policies = decoded['policies']
        self.assertEqual(len(policies), 6)
        policy = policies[5]

        url = "https://taskrouter.twilio.com/v1/Workspaces/{}/Workers/{}".format(
            self.workspace_sid, self.worker_sid)

        self.assertEqual(url, policy["url"])
        self.assertEqual("POST", policy["method"])
        self.assertTrue(policy["allow"])
        self.assertNotEqual(None, policy['post_filter'])
        self.assertEqual({}, policy['query_filter'])
        self.assertTrue(policy['post_filter']['ActivitySid'])

    def test_allow_reservation_updates(self):
        # allow reservation updates
        self.capability.allow_reservation_updates()

        token = self.capability.generate_token()
        self.assertNotEqual(None, token)

        decoded = jwt.decode(token, self.auth_token)
        self.assertNotEqual(None, decoded)

        policies = decoded['policies']
        self.assertEqual(len(policies), 6)

        policy = policies[5]

        url = "https://taskrouter.twilio.com/v1/Workspaces/{}/Tasks/**".format(
            self.workspace_sid)

        self.check_policy('POST', url, policy)


if __name__ == "__main__":
    unittest.main()
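# Illustrative note on the "{}/{}" placeholders above: auto-numbered format
# fields were only introduced in Python 2.7, so on Python 2.6 str.format
# raises ValueError ("zero length field name in format"). Explicit indices
# work on both, which is presumably what the "Fix rogue format" change swaps
# in:
#     "{}/{}".format("AC123", "WK789")    # ValueError on Python 2.6
#     "{0}/{1}".format("AC123", "WK789")  # -> "AC123/WK789" everywhere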
Python
0.000038
@@ -2679,11 +2679,13 @@ ls/%7B +0 %7D/%7B +1 %7D'.f @@ -3872,16 +3872,17 @@ spaces/%7B +0 %7D/Worker @@ -3884,16 +3884,17 @@ orkers/%7B +1 %7D%22.forma @@ -4739,16 +4739,17 @@ spaces/%7B +0 %7D/Tasks/
7daf7440b271c923e4a210a47ba4ba87d76181a3
Add more assertions to rendering test
communication/tests.py
communication/tests.py
from communication.mail import SUBJECT, TEXT, HTML, send_email, render_blocks

from mock import Mock, patch, sentinel
from unittest2 import TestCase

from django.template.loader_tags import BlockNode


class SendEmailTestCase(TestCase):

    def setUp(self):
        patcher = patch('communication.mail.settings')
        self.settings_mock = patcher.start()
        patcher = patch('communication.mail.render_blocks')
        self.render_mock = patcher.start()
        patcher = patch('communication.mail.EmailMultiAlternatives')
        self.email_mock = patcher.start()
        self.settings_mock.DEFAULT_FROM_EMAIL = sentinel.from_email

    def test_sending_email_without_html(self):
        """Html content is not attached when html block is missing"""
        self.render_mock.return_value = {SUBJECT: sentinel.subject,
                                         TEXT: sentinel.text}

        send_email(address=sentinel.address,
                   template_name=sentinel.template_name,
                   context=sentinel.context)

        self.assert_email_constructed()
        self.email_mock().send.assert_called_once()

    def test_sending_email_with_html(self):
        """Html content is attached when html block present"""
        self.render_mock.return_value = {SUBJECT: sentinel.subject,
                                         TEXT: sentinel.text,
                                         HTML: sentinel.html}

        send_email(address=sentinel.address,
                   template_name=sentinel.template_name,
                   context=sentinel.context)

        self.assert_email_constructed()
        self.email_mock().attach_alternative.assert_called_once_with(
            sentinel.html, 'text/html')
        self.email_mock().send.assert_called_once()

    def assert_email_constructed(self):
        self.email_mock.assert_called_once_with(
            subject=sentinel.subject,
            body=sentinel.text,
            from_email=sentinel.from_email,
            to=[sentinel.address])

    def tearDown(self):
        patch.stopall()


class RenderBlocksTestCase(TestCase):

    @patch('communication.mail.get_template')
    @patch('communication.mail.Context')
    def test_block_rendering(self, context_mock, get_template_mock):
        """Template blocks are rendered with proper context"""
        html_block = Mock(spec=BlockNode)
        html_block.name = HTML
        some_block = Mock(spec=BlockNode)
        some_block.name = 'some_block'
        non_block = Mock()
        get_template_mock.return_value = [html_block, some_block, non_block]

        blocks = render_blocks(template_name=sentinel.template_name,
                               context=sentinel.context)

        self.assertEquals(blocks, {HTML: html_block.render()})
        context_mock.assert_called_once_with(sentinel.context)
Python
0
@@ -2715,61 +2715,127 @@ -self.assertEquals(blocks, %7BHTML: html_block.render +context_mock.assert_called_once_with(sentinel.context)%0A html_block.render.assert_called_once_with(context_mock () -%7D )%0A @@ -2840,36 +2840,41 @@ -context_mock +some_block.render .assert_ called_o @@ -2869,39 +2869,125 @@ ert_ +not_ called -_once_with(sentinel.context +()%0A non_block.render.assert_not_called()%0A self.assertEquals(blocks, %7BHTML: html_block.render()%7D )%0A
32720b2cc4e2599a57ddf81ec1d9c334d71b29f1
Add Schedule to the system job migration
awx/main/migrations/0010_v300_create_system_job_templates.py
awx/main/migrations/0010_v300_create_system_job_templates.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations
from django.utils.timezone import now

from awx.api.license import feature_enabled


def create_system_job_templates(apps, schema_editor):
    '''
    Create default system job templates if not present. Create default
    schedules only if new system job templates were created (i.e. new
    database).
    '''
    SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    sjt_ct = ContentType.objects.get_for_model(SystemJobTemplate)
    now_dt = now()
    now_str = now_dt.strftime('%Y%m%dT%H%M%SZ')

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_jobs',
        defaults=dict(
            name='Cleanup Job Details',
            description='Remove job history older than X days',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created:
        sjt.schedules.create(
            name='Cleanup Job Schedule',
            rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'days': '120'},
            created=now_dt,
            modified=now_dt,
        )

    existing_cd_jobs = SystemJobTemplate.objects.filter(job_type='cleanup_deleted')
    Schedule.objects.filter(unified_job_template__in=existing_cd_jobs).delete()
    existing_cd_jobs.delete()

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_activitystream',
        defaults=dict(
            name='Cleanup Activity Stream',
            description='Remove activity stream history older than X days',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created:
        sjt.schedules.create(
            name='Cleanup Activity Schedule',
            rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=TU' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'days': '355'},
            created=now_dt,
            modified=now_dt,
        )

    sjt, created = SystemJobTemplate.objects.get_or_create(
        job_type='cleanup_facts',
        defaults=dict(
            name='Cleanup Fact Details',
            description='Remove system tracking history',
            created=now_dt,
            modified=now_dt,
            polymorphic_ctype=sjt_ct,
        ),
    )
    if created and feature_enabled('system_tracking', bypass_database=True):
        sjt.schedules.create(
            name='Cleanup Fact Schedule',
            rrule='DTSTART:%s RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=1' % now_str,
            description='Automatically Generated Schedule',
            enabled=True,
            extra_data={'older_than': '120d', 'granularity': '1w'},
            created=now_dt,
            modified=now_dt,
        )


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0009_v300_rbac_migrations'),
    ]

    operations = [
        migrations.RunPython(create_system_job_templates, migrations.RunPython.noop),
    ]
Python
0
@@ -463,24 +463,74 @@ bTemplate')%0A + Schedule = apps.get_model('main', 'Schedule')%0A ContentT
129d96d9e1f4d247f48dec10ce551cd68efdf5d8
use metric name in append to time series
jira_integration_example/tests/jira_notification_handler_integration_test.py
jira_integration_example/tests/jira_notification_handler_integration_test.py
# Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time

import pytest
from google.cloud import monitoring_v3
from google.api_core import exceptions
from google.api_core import retry
from jira import JIRA

import main
from tests import constants


@retry.Retry(predicate=retry.if_exception_type(exceptions.NotFound), deadline=10)
def short_retry(callable_function, *args):
    return callable_function(*args)


@retry.Retry(predicate=retry.if_exception_type(AssertionError), deadline=180)
def long_retry(callable_function, *args):
    return callable_function(*args)


@pytest.fixture
def config():
    return main.app.config


@pytest.fixture(scope='function')
def jira_client(config):
    # setup
    oauth_dict = {'access_token': config['JIRA_ACCESS_TOKEN'],
                  'access_token_secret': config['JIRA_ACCESS_TOKEN_SECRET'],
                  'consumer_key': config['JIRA_CONSUMER_KEY'],
                  'key_cert': config['JIRA_KEY_CERT']}
    jira_client = JIRA(config['JIRA_URL'], oauth=oauth_dict)
    yield jira_client
    # tear down
    project_id = config['PROJECT_ID']
    test_issues = jira_client.search_issues(
        f'description~"custom/integ-test-metric for {project_id}"')
    for issue in test_issues:
        issue.delete()


@pytest.fixture(scope='function')
def metric_descriptor(config, metric_name):
    # setup
    metric_client = monitoring_v3.MetricServiceClient()
    gcp_project_path = metric_client.project_path(config['PROJECT_ID'])
    test_metric_descriptor = constants.TEST_METRIC_DESCRIPTOR_TEMPLATE
    test_metric_descriptor['type'] = constants.TEST_METRIC_DESCRIPTOR_TEMPLATE['type'].format(METRIC_NAME=metric_name)
    metric_descriptor = metric_client.create_metric_descriptor(
        gcp_project_path, test_metric_descriptor)
    metric_descriptor = short_retry(metric_client.get_metric_descriptor,
                                    metric_descriptor.name)
    yield metric_descriptor
    # tear down
    metric_client.delete_metric_descriptor(metric_descriptor.name)


@pytest.fixture(scope='function')
def notification_channel(config):
    # setup
    notification_channel_client = monitoring_v3.NotificationChannelServiceClient()
    gcp_project_path = notification_channel_client.project_path(config['PROJECT_ID'])
    test_notification_channel = constants.TEST_NOTIFICATION_CHANNEL_TEMPLATE
    test_notification_channel['labels']['topic'] = constants.TEST_NOTIFICATION_CHANNEL_TEMPLATE['labels']['topic'].format(PROJECT_ID=config['PROJECT_ID'])
    notification_channel = notification_channel_client.create_notification_channel(
        gcp_project_path, test_notification_channel)
    notification_channel = short_retry(notification_channel_client.get_notification_channel,
                                       notification_channel.name)
    yield notification_channel
    # tear down
    notification_channel_client.delete_notification_channel(notification_channel.name)


@pytest.fixture(scope='function')
def alert_policy(config, notification_channel, alert_policy_name, metric_name):
    # setup
    policy_client = monitoring_v3.AlertPolicyServiceClient()
    gcp_project_path = policy_client.project_path(config['PROJECT_ID'])
    test_alert_policy = constants.TEST_ALERT_POLICY_TEMPLATE
    test_alert_policy['notification_channels'].append(notification_channel.name)
    test_alert_policy['display_name'] = alert_policy_name
    test_alert_policy['user_labels']['metric'] = metric_name
    metric_path = constants.METRIC_PATH.format(METRIC_NAME=metric_name)
    test_alert_policy['conditions'][0]['condition_threshold']['filter'] = test_alert_policy['conditions'][0]['condition_threshold']['filter'].format(METRIC_PATH=metric_path)
    alert_policy = policy_client.create_alert_policy(
        gcp_project_path, test_alert_policy)
    alert_policy = short_retry(policy_client.get_alert_policy, alert_policy.name)
    yield alert_policy
    # tear down
    policy_client.delete_alert_policy(alert_policy.name)


def append_to_time_series(config, point_value):
    client = monitoring_v3.MetricServiceClient()
    gcp_project_path = client.project_path(config['PROJECT_ID'])
    series = monitoring_v3.types.TimeSeries()
    series.metric.type = constants.METRIC_PATH
    series.resource.type = constants.RESOURCE_TYPE
    series.resource.labels['instance_id'] = constants.INSTANCE_ID
    series.resource.labels['zone'] = constants.ZONE
    point = series.points.add()
    point.value.double_value = point_value
    now = time.time()
    point.interval.end_time.seconds = int(now)
    point.interval.end_time.nanos = int(
        (now - point.interval.end_time.seconds) * 10**9)
    client.create_time_series(gcp_project_path, [series])


@pytest.mark.parametrize('metric_name,alert_policy_name',
                         [('integ-test-metric','integ-test-policy')])
def test_open_close_ticket(config, metric_descriptor, notification_channel,
                           alert_policy, jira_client):
    # Sanity check that the test fixtures were initialized with values that
    # the rest of the test expects
    assert metric_descriptor.type == constants.TEST_METRIC_DESCRIPTOR_TEMPLATE['type'].format(METRIC_NAME='integ-test-metric')
    assert notification_channel.display_name == constants.TEST_NOTIFICATION_CHANNEL_TEMPLATE['display_name']
    assert alert_policy.display_name == 'integ-test-policy'
    assert alert_policy.notification_channels[0] == notification_channel.name

    def assert_jira_issue_is_created():
        # Search for all issues where the status is 'unresolved' and
        # the integ-test-metric custom field is set to this Cloud Monitoring project ID
        project_id = config['PROJECT_ID']
        query_string = f'description~"custom/integ-test-metric for {project_id}" and status=10000'
        created_monitoring_issues = jira_client.search_issues(query_string)
        assert len(created_monitoring_issues) == 1

    def assert_jira_issue_is_resolved():
        # Search for all issues where the status is 'resolved' and
        # the integ-test-metric custom field is set to this Cloud Monitoring project ID
        project_id = config['PROJECT_ID']
        query_string = f'description~"custom/integ-test-metric for {project_id}" and status={config["CLOSED_JIRA_ISSUE_STATUS"]}'
        resolved_monitoring_issues = jira_client.search_issues(query_string)
        assert len(resolved_monitoring_issues) == 1

    # trigger incident and check jira issue created
    append_to_time_series(config, constants.TRIGGER_NOTIFICATION_THRESHOLD_DOUBLE + 1)
    long_retry(assert_jira_issue_is_created)  # issue status id for "To Do"

    # resolve incident and check jira issue resolved
    append_to_time_series(config, constants.TRIGGER_NOTIFICATION_THRESHOLD_DOUBLE)
    long_retry(assert_jira_issue_is_resolved)
Python
0.002114
@@ -4588,16 +4588,29 @@ (config, + metric_name, point_v @@ -4823,16 +4823,48 @@ RIC_PATH +.format(METRIC_NAME=metric_name) %0A ser
6dd4bdb35aca0f23ec5509e1a39748eeb146a720
Fix if statement
df_pipelines_init.py
df_pipelines_init.py
"""Initialise the DAGs for all the pipelines required to process different datasets containing MRI images""" # Please keep keywords airflow and DAG in this file, otherwise the safe mode in DagBag may skip this file import logging from airflow import configuration from common_steps import default_config from preprocessing_pipelines.mri_notify_failed_processing import mri_notify_failed_processing_dag from preprocessing_pipelines.mri_notify_skipped_processing import mri_notify_skipped_processing_dag from preprocessing_pipelines.mri_notify_successful_processing import mri_notify_successful_processing_dag from preprocessing_pipelines.continuously_pre_process_incoming import continuously_preprocess_incoming_dag from preprocessing_pipelines.daily_pre_process_incoming import daily_preprocess_incoming_dag from preprocessing_pipelines.flat_pre_process_incoming import flat_preprocess_incoming_dag from preprocessing_pipelines.pre_process_images import pre_process_images_dag from etl_pipelines.daily_ehr_incoming import daily_ehr_incoming_dag from etl_pipelines.flat_ehr_incoming import flat_ehr_incoming_dag from etl_pipelines.ehr_to_i2b2 import ehr_to_i2b2_dag from reorganisation_pipelines.flat_reorganise import flat_reorganisation_dag from reorganisation_pipelines.reorganise import reorganise_dag def register_dag(dag): dag_id = dag.dag_id var_name = "%s_dag" % dag_id globals()[var_name] = dag logging.info("Add DAG %s", dag_id) return dag_id def register_reorganisation_dags(dataset, dataset_section, email_errors_to): reorganisation_section = dataset_section + ':reorganisation' default_config(reorganisation_section, 'INPUT_FOLDER_DEPTH', '1') reorganisation_input_folder = configuration.get(reorganisation_section, 'INPUT_FOLDER') depth = int(configuration.get(reorganisation_section, 'INPUT_FOLDER_DEPTH')) max_active_runs = int(configuration.get(reorganisation_section, 'MAX_ACTIVE_RUNS')) reorganisation_pipelines = configuration.get(reorganisation_section, 'PIPELINES').split(',') if reorganisation_pipelines and len(reorganisation_pipelines) > 0 and reorganisation_pipelines[0] != '': reorganisation_dag_id = register_dag(reorganise_dag(dataset=dataset, section=reorganisation_section, email_errors_to=email_errors_to, max_active_runs=max_active_runs, reorganisation_pipelines=reorganisation_pipelines)) register_dag(flat_reorganisation_dag( dataset=dataset, folder=reorganisation_input_folder, depth=depth, email_errors_to=email_errors_to, trigger_dag_id=reorganisation_dag_id)) # endif def register_preprocessing_dags(dataset, dataset_section, email_errors_to): dataset_label = configuration.get(dataset_section, 'DATASET_LABEL') preprocessing_section = dataset_section + ':preprocessing' # Set the default configuration for the preprocessing of the dataset default_config(preprocessing_section, 'SCANNERS', 'daily') default_config(preprocessing_section, 'PIPELINES', 'copy_to_local,dicom_to_nifti,mpm_maps,neuro_morphometric_atlas') preprocessing_input_folder = configuration.get( preprocessing_section, 'INPUT_FOLDER') preprocessing_scanners = configuration.get( preprocessing_section, 'SCANNERS').split(',') preprocessing_pipelines = configuration.get( preprocessing_section, 'PIPELINES').split(',') max_active_runs = int(configuration.get(preprocessing_section, 'MAX_ACTIVE_RUNS')) logging.info("Create pipelines for dataset %s using scannners %s and pipelines %s", dataset_label, preprocessing_scanners, preprocessing_pipelines) pre_process_images_dag_id = register_dag(pre_process_images_dag(dataset=dataset, 
section=preprocessing_section, email_errors_to=email_errors_to, max_active_runs=max_active_runs, preprocessing_pipelines=preprocessing_pipelines)) if 'continuous' in preprocessing_scanners: register_dag(continuously_preprocess_incoming_dag( dataset=dataset, folder=preprocessing_input_folder, email_errors_to=email_errors_to, trigger_dag_id=pre_process_images_dag_id)) if 'daily' in preprocessing_scanners: register_dag(daily_preprocess_incoming_dag( dataset=dataset, folder=preprocessing_input_folder, email_errors_to=email_errors_to, trigger_dag_id=pre_process_images_dag_id)) if 'flat' in preprocessing_scanners: register_dag(flat_preprocess_incoming_dag( dataset=dataset, folder=preprocessing_input_folder, email_errors_to=email_errors_to, trigger_dag_id=pre_process_images_dag_id)) def register_ehr_dags(dataset, dataset_section, email_errors_to): ehr_section = dataset_section + ':ehr' # Set the default configuration for the preprocessing of the dataset default_config(ehr_section, 'SCANNERS', '') default_config(ehr_section, 'INPUT_FOLDER_DEPTH', '1') ehr_scanners = configuration.get(ehr_section, 'SCANNERS') max_active_runs = int(configuration.get(ehr_section, 'MAX_ACTIVE_RUNS')) if ehr_scanners != '': ehr_scanners = ehr_scanners.split(',') ehr_input_folder = configuration.get(ehr_section, 'INPUT_FOLDER') if 'daily' in ehr_scanners: register_dag(daily_ehr_incoming_dag( dataset=dataset, folder=ehr_input_folder, email_errors_to=email_errors_to, trigger_dag_id='%s_ehr_to_i2b2' % dataset.lower())) if 'flat' in ehr_scanners: ehr_input_folder_depth = int(configuration.get(ehr_section, 'INPUT_FOLDER_DEPTH')) register_dag(flat_ehr_incoming_dag( dataset=dataset, folder=ehr_input_folder, depth=ehr_input_folder_depth, email_errors_to=email_errors_to, trigger_dag_id='%s_ehr_to_i2b2' % dataset.lower())) register_dag(ehr_to_i2b2_dag(dataset=dataset, section=ehr_section, email_errors_to=email_errors_to, max_active_runs=max_active_runs)) def init_pipelines(): default_config('mipmap', 'DB_CONFIG_FILE', '/dev/null') dataset_sections = configuration.get('data-factory', 'DATASETS') email_errors_to = configuration.get('data-factory', 'EMAIL_ERRORS_TO') register_dag(mri_notify_failed_processing_dag()) register_dag(mri_notify_skipped_processing_dag()) register_dag(mri_notify_successful_processing_dag()) for dataset in dataset_sections.split(','): dataset_section = 'data-factory:%s' % dataset register_reorganisation_dags(dataset, dataset_section, email_errors_to) register_preprocessing_dags(dataset, dataset_section, email_errors_to) register_ehr_dags(dataset, dataset_section, email_errors_to) init_pipelines()
Python
1
@@ -6458,16 +6458,20 @@ wer()))%0A + regi @@ -6558,32 +6558,36 @@ + email_errors_to= @@ -6595,32 +6595,36 @@ mail_errors_to,%0A + @@ -6674,16 +6674,28 @@ _runs))%0A + # endif%0A %0A%0Adef in
a98ebc7728947e77d92378cdf867c500212738ca
Update dbx_email_alerts.py
Sharing/dbx_email_alerts.py
Sharing/dbx_email_alerts.py
#install the dropbox SDK with 'pip install dropbox'
import dropbox
import datetime
import time
import smtplib
import requests

#requires Dropbox Business API token with 'Team Auditing' permission
token = "<enter token here>"
cursor = None

# instantiating dropbox team object
dbxt = dropbox.DropboxTeam(token)

# Full list of alerts available at:
# https://www.dropbox.com/developers/documentation/http/teams#team_log-get_events
alerts = ["sign_in_as_session_start",
          "member_change_admin_role",
          "shared_link_create",
          # "login_fail",
          # "shared_folder_create",
          # "file_request_create",
          # "account_capture_relinquish_account",
          # "shared_content_copy"
          ]

# If using gmail, "enable less secure apps" needs to be turned on.
# https://myaccount.google.com/security -> "Enable less secure apps"
# For a more robust solution, use an email API tool e.g. Mailgun
sender_email = "<sender_email@gmail.com>"
sender_pw = "<sender_password>"
receiver_email = "<receiver_email>"


def send_email(subject, body):
    s = smtplib.SMTP('smtp.gmail.com', 587)
    s.starttls()
    s.login(sender_email, sender_pw)
    message = "Subject: %s \n\n %s" % (subject, body)
    s.sendmail(sender_email, receiver_email, message)
    s.quit()


def check_alerts(token):
    global cursor
    #On the first cycle, the cursor will be none. The cursor will be
    #updated on following cycles
    if cursor is None:
        # Start time has an offset of 1 minute from the current time. Can
        # optionally increase or decrease the start time offset. For example,
        # if you stop the script and plan to restart it 12 hours later, you may
        # want to increase the offset to 12 hours so that events in the 12 hours
        # prior to start are captured.
        start_time = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
        time_range = dropbox.team_common.TimeRange(start_time=start_time)
        log = dbxt.team_log_get_events(time=time_range)
        events = log.events
        cursor = log.cursor
        for event in events:
            if event.event_type._tag in alerts:
                email_subject = event.event_type._tag
                email_body = "Event was found at: %s" % event.timestamp
                send_email(email_subject, email_body)
    else:
        log = dbxt.team_log_get_events_continue(cursor)
        events = log.events
        cursor = log.cursor
        has_more = log.has_more
        for event in events:
            if event.event_type._tag in alerts:
                email_subject = event.event_type._tag
                email_body = "Event was found at: %s" % event.timestamp
                send_email(email_subject, email_body)


# run the check alerts sequence on a 1 minute loop.
while True:
    try:
        print(datetime.datetime.utcnow())
        check_alerts(token)
        time.sleep(60)
    except requests.exceptions.ReadTimeout:
        print ("Request Timeout")
    except requests.exceptions.ConnectionError:
        print ("Connection Error")
    # Breaking on other errors and notifying of a required restart.
    # It is recommended to handle potential Dropbox and other
    # errors specifically
    except Exception as e:
        print(e)
        subject = "Alert Service Error - Restart Required"
        body = "Alert service encountered an error and needs to be restarted: %s" % e
        send_email(subject, body)
        break
Python
0.000006
@@ -435,9 +435,9 @@ s = -%5B +%7B %22sig @@ -724,10 +724,9 @@ - %5D +%7D %0A%0A#
c00d2f55de2817616db49896b0de2a2b514800d3
replace values with constants
jira_integration_example/tests/jira_notification_handler_integration_test.py
jira_integration_example/tests/jira_notification_handler_integration_test.py
# Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest

from tests import constants

import copy
import time

from jira import JIRA, Issue
from google.cloud import monitoring_v3
from google.protobuf.duration_pb2 import Duration
from google.api_core import exceptions
from google.api_core import retry


@retry.Retry(predicate=retry.if_exception_type(exceptions.NotFound), deadline=10)
def call_get_metric(metric_client, name):
    return metric_client.get_metric_descriptor(name)


@retry.Retry(predicate=retry.if_exception_type(exceptions.NotFound), deadline=10)
def call_get_alert_policy(policy_client, name):
    return policy_client.get_alert_policy(name)


@retry.Retry(predicate=retry.if_exception_type(exceptions.NotFound), deadline=10)
def call_get_notification_channel(notification_channel_client, name):
    return notification_channel_client.get_notification_channel(name)


@retry.Retry(predicate=retry.if_exception_type(AssertionError), deadline=120)
def call_assert_jira_issue_created(jira_client):
    jira_client.create_issue.assert_called_once()


@pytest.fixture(scope='function')
def metric_descriptor():
    # setup
    metric_client = monitoring_v3.MetricServiceClient()
    gcp_project_path = metric_client.project_path(constants.PROJECT_ID)
    metric_descriptor = metric_client.create_metric_descriptor(
        gcp_project_path, constants.TEST_METRIC_DESCRIPTOR)
    metric_descriptor = call_get_metric(metric_client, metric_descriptor.name)
    yield metric_descriptor
    # tear down
    metric_client.delete_metric_descriptor(metric_descriptor.name)


@pytest.fixture(scope='function')
def notification_channel():
    # setup
    notification_channel_client = monitoring_v3.NotificationChannelServiceClient()
    gcp_project_path = notification_channel_client.project_path(constants.PROJECT_ID)
    notification_channel = notification_channel_client.create_notification_channel(
        gcp_project_path, constants.TEST_NOTIFICATION_CHANNEL)
    notification_channel = call_get_notification_channel(notification_channel_client,
                                                         notification_channel.name)
    yield notification_channel
    # tear down
    notification_channel_client.delete_notification_channel(notification_channel.name)


@pytest.fixture(scope='function')
def alert_policy(metric_descriptor, notification_channel):
    # setup
    policy_client = monitoring_v3.AlertPolicyServiceClient()
    gcp_project_path = policy_client.project_path(constants.PROJECT_ID)
    print(metric_descriptor.name)
    print(notification_channel.name)
    test_alert_policy = constants.TEST_ALERT_POLICY_TEMPLATE
    test_alert_policy['notification_channels'].append(notification_channel.name)
    alert_policy = policy_client.create_alert_policy(
        gcp_project_path, test_alert_policy)
    alert_policy = call_get_alert_policy(policy_client, alert_policy.name)
    yield alert_policy
    # tear down
    policy_client.delete_alert_policy(alert_policy.name)


def append_to_time_series(point_value):
    client = monitoring_v3.MetricServiceClient()
    gcp_project_path = client.project_path(constants.PROJECT_ID)
    series = monitoring_v3.types.TimeSeries()
    series.metric.type = 'custom.googleapis.com/' + metric_name
    series.resource.type = 'gce_instance'
    series.resource.labels['instance_id'] = '1234567890123456789'
    series.resource.labels['zone'] = 'us-central1-f'
    point = series.points.add()
    point.value.double_value = point_value
    now = time.time()
    point.interval.end_time.seconds = int(now)
    point.interval.end_time.nanos = int(
        (now - point.interval.end_time.seconds) * 10**9)
    client.create_time_series(gcp_project_path, [series])


def test_end_to_end(metric_descriptor, notification_channel, alert_policy, mocker):
    assert metric_descriptor.type == constants.TEST_METRIC_DESCRIPTOR['type']
    assert notification_channel.display_name == constants.TEST_NOTIFICATION_CHANNEL['display_name']
    assert alert_policy.display_name == constants.ALERT_POLICY_NAME
    assert alert_policy.user_labels == constants.TEST_ALERT_POLICY_TEMPLATE['user_labels']
    assert alert_policy.notification_channels[0] == notification_channel.name

    # trigger incident
    append_to_time_series(constants.TRIGGER_NOTIFICATION_THRESHOLD_DOUBLE + 1)
    jira_client = mocker.create_autospec(JIRA, instance=True)
    call_assert_jira_issue_created(jira_client)
Python
0.001126
@@ -3773,46 +3773,29 @@ e = -'custom.googleapis.com/' + metric_name +constants.METRIC_PATH %0A @@ -3822,22 +3822,31 @@ e = -'gce_instance' +constants.RESOURCE_TYPE %0A @@ -3890,29 +3890,29 @@ %5D = -'1234567890123456789' +constants.INSTANCE_ID %0A @@ -3949,23 +3949,22 @@ %5D = -'us-central1-f' +constants.ZONE %0A
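Decoding the diff field above (whitespace approximate), the commit replaces the hard-coded literals in append_to_time_series with named test constants; the constants.* identifiers come straight from the diff, the surrounding lines are unchanged context:

# Patched per the diff: literals replaced by test constants.
series.metric.type = constants.METRIC_PATH
series.resource.type = constants.RESOURCE_TYPE
series.resource.labels['instance_id'] = constants.INSTANCE_ID
series.resource.labels['zone'] = constants.ZONE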
1de1a8e06cdca30974575e6e6308bdae75a9d5de
Update set_license_key.py
examples/Redfish/set_license_key.py
examples/Redfish/set_license_key.py
# Copyright 2020 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# -*- coding: utf-8 -*-
"""
An example of adding a license key for HPE iLO systems
"""

import sys
import json
from redfish import RedfishClient
from redfish.rest.v1 import ServerDownOrUnreachableError

from get_resource_directory import get_resource_directory


def set_license_key(_redfishobj, ilo_key):
    ilo_lic_uri = None

    resource_instances = get_resource_directory(_redfishobj)
    if DISABLE_RESOURCE_DIR or not resource_instances:
        #if we do not have a resource directory or want to force its non-use to find the
        #relevant URI
        managers_uri = _redfishobj.root.obj['Managers']['@odata.id']
        managers_response = _redfishobj.get(managers_uri)
        managers_members_uri = next(iter(managers_response.obj['Members']))['@odata.id']
        managers_members_response = _redfishobj.get(managers_members_uri)
        ilo_lic_uri = managers_members_response.obj.Oem.Hpe.Links['LicenseService']['@odata.id']
    else:
        #Use Resource directory to find the relevant URI
        for instance in resource_instances:
            if '#HpeiLOLicense.' in instance['@odata.type']:
                ilo_lic_uri = instance['@odata.id']

    if ilo_lic_uri:
        ilo_license_collection = _redfishobj.get(ilo_lic_uri)
        ilo_license_member_uri = next(iter(ilo_license_collection.obj['Members']))['@odata.id']
        try:
            ilo_license_data = _redfishobj.get(ilo_license_member_uri).obj['ConfirmationRequest']\
                ['EON']
        except KeyError:
            sys.stdout.write("This machine will not show the full License Key.\n")
            ilo_license_data = _redfishobj.get(ilo_license_member_uri).obj['LicenseKey']
        sys.stdout.write("Current iLO License Data:\n")
        print(json.dumps(ilo_license_data, indent=4, sort_keys=True))

        resp = _redfishobj.post(ilo_lic_uri, {'LicenseKey' : ilo_key})
        #If iLO responds with something outside of 200 or 201 then lets check the iLO extended info
        #error message to see what went wrong
        if resp.status == 400:
            try:
                print(json.dumps(resp.obj['error']['@Message.ExtendedInfo'], indent=4, \
                                 sort_keys=True))
                sys.stderr.write("Check the validity of your license key...\n")
            except Exception as excp:
                sys.stderr.write("A response error occurred, unable to access iLO "\
                                 "Extended Message Info...")
        elif resp.status != 200:
            sys.stderr.write("An http response of \'%s\' was returned.\n" % resp.status)
        else:
            print("Success!\n")
            print(json.dumps(resp.dict, indent=4, sort_keys=True))


if __name__ == "__main__":
    # When running on the server locally use the following commented values
    #SYSTEM_URL = None
    #LOGIN_ACCOUNT = None
    #LOGIN_PASSWORD = None

    # When running remotely connect using the secured (https://) address,
    # account name, and password to send https requests
    # SYSTEM_URL acceptable examples:
    # "https://10.0.0.100"
    # "https://ilo.hostname"
    SYSTEM_URL = "https://10.0.0.100"
    LOGIN_ACCOUNT = "admin"
    LOGIN_PASSWORD = "password"

    # Must be a valid iLO License Key
    ILO_LICENSE_KEY = "XXXX-XXXX-XXXX-XXXX-XXXXX"

    # flag to force disable resource directory. Resource directory and associated operations are
    # intended for HPE servers.
    DISABLE_RESOURCE_DIR = True

    try:
        # Create a Redfish client object
        REDFISHOBJ = RedfishClient(base_url=SYSTEM_URL, username=LOGIN_ACCOUNT, \
                                   password=LOGIN_PASSWORD)
        # Login with the Redfish client
        REDFISHOBJ.login()
    except ServerDownOrUnreachableError as excp:
        sys.stderr.write("ERROR: server not reachable or does not support RedFish.\n")
        sys.exit()

    set_license_key(REDFISHOBJ, ILO_LICENSE_KEY)
    REDFISHOBJ.logout()
Python
0.000002
@@ -1682,30 +1682,20 @@ - if '# -HpeiLOLicense +Manager .' i @@ -1737,34 +1737,179 @@ - ilo_lic_uri = instance +manager_uri = instance%5B'@odata.id'%5D%0A mager_data = _redfishobj.get(manager_uri)%0A ilo_lic_uri = mager_data.obj%5B'Oem'%5D%5B'Hpe'%5D%5B'Links'%5D%5B'LicenseService'%5D %5B'@o
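Decoded, the diff rewrites the resource-directory branch to match '#Manager.' resources and to follow the manager's OEM links to the license service URI; note that the commit itself spells the intermediate variable mager_data, reproduced as committed:

# Patched per the diff (variable name 'mager_data' is as committed).
for instance in resource_instances:
    if '#Manager.' in instance['@odata.type']:
        manager_uri = instance['@odata.id']
        mager_data = _redfishobj.get(manager_uri)
        ilo_lic_uri = mager_data.obj['Oem']['Hpe']['Links']['LicenseService']['@odata.id']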
edb1c61a7ded49b63e272bd409fcbf6468173948
remove comment
bitbots_head_behavior/src/bitbots_head_behavior/head_node.py
bitbots_head_behavior/src/bitbots_head_behavior/head_node.py
#!/usr/bin/env python3
"""
This is the ROS-Node which contains the head behavior, starts the appropriate DSD,
initializes the HeadBlackboard and subscribes to head_behavior specific ROS-Topics.
"""
import os

import rospy

from bitbots_blackboard.blackboard import HeadBlackboard
from dynamic_stack_decider.dsd import DSD
from humanoid_league_msgs.msg import HeadMode as HeadModeMsg, PoseWithCertainty, PoseWithCertaintyArray
from bitbots_msgs.msg import JointCommand
from sensor_msgs.msg import JointState
from std_msgs.msg import Header
from geometry_msgs.msg import PoseWithCovarianceStamped
from moveit_ros_planning_interface._moveit_roscpp_initializer import roscpp_init, roscpp_shutdown
from bitbots_ros_patches.rate import Rate


def run(dsd):
    """
    Main run-loop

    :returns: Never
    """
    rate = Rate(60)
    while not rospy.is_shutdown():
        dsd.update()
        rate.sleep()
    # Also stop cpp node
    roscpp_shutdown()


def init():
    """
    Initialize new components needed for head_behavior: blackboard, dsd, rostopic subscriber
    """
    rospy.init_node('head_behavior')
    # This is a general purpose initialization function provided by moveit
    # It is used to correctly initialize roscpp which is used in the collision checker module
    roscpp_init('collision_checker', [])
    blackboard = HeadBlackboard()

    rospy.Subscriber('head_mode', HeadModeMsg, blackboard.head_capsule.head_mode_callback, queue_size=1)
    # rospy.Subscriber("balls_relative", PoseWithCertaintyArray, blackboard.world_model.balls_callback)
    rospy.Subscriber("ball_position_relative_filtered", PoseWithCovarianceStamped,
                     blackboard.world_model.ball_filtered_callback)
    rospy.Subscriber('joint_states', JointState, blackboard.head_capsule.joint_state_callback)
    blackboard.head_capsule.position_publisher = rospy.Publisher("head_motor_goals", JointCommand, queue_size=10)
    blackboard.head_capsule.visual_compass_record_trigger = rospy.Publisher(
        blackboard.config['visual_compass_trigger_topic'], Header, queue_size=5)

    dirname = os.path.dirname(os.path.realpath(__file__))
    dsd = DSD(blackboard, 'debug/dsd/head_behavior')
    dsd.register_actions(os.path.join(dirname, 'actions'))
    dsd.register_decisions(os.path.join(dirname, 'decisions'))
    dsd.load_behavior(os.path.join(dirname, 'head_behavior.dsd'))
    rospy.logdebug("Head Behavior completely loaded")
    return dsd


if __name__ == '__main__':
    run(init())
Python
0
@@ -1462,112 +1462,8 @@ =1)%0A - # rospy.Subscriber(%22balls_relative%22, PoseWithCertaintyArray, blackboard.world_model.balls_callback)%0A
a65c57b85ecd57fdb8d0521c1b6ce3ecda5d3916
Add library to list of types to upgrade.
src/encoded/commands/upgrade.py
src/encoded/commands/upgrade.py
"""\ Run this to upgrade the site. Examples To update on the production server: %(prog)s production.ini For the development.ini you must supply the paster app name: %(prog)s development.ini --app-name app """ from contextlib import contextmanager import logging EPILOG = __doc__ logger = logging.getLogger(__name__) DEFAULT_COLLECTIONS = [ ] def internal_app(configfile, app_name=None, username=None): from webtest import TestApp from pyramid import paster app = paster.get_app(configfile, app_name) if not username: username = 'IMPORT' environ = { 'HTTP_ACCEPT': 'application/json', 'REMOTE_USER': username, } return TestApp(app, environ) def run(testapp, collections): from ..storage import DBSession with AlternateScope(DBSession) as scope: if not collections: collections = DEFAULT_COLLECTIONS root = testapp.app.root_factory(testapp.app) for collection_name in collections: collection = root[collection_name] count = 0 errors = 0 logger.info('Upgrading %s', collection_name) for uuid in collection: count += 1 with scope.change(): try: testapp.patch_json('/%s' % uuid, {}) except Exception: logger.exception('Upgrade failed for: /%s/%s', collection_name, uuid) errors += 1 if count % 1000 == 0: logger.info('Upgrading %s: %d', collection_name, count) logger.info('Upgraded %s: %d (errors: %d)', collection_name, count, errors) class AlternateScope(object): def __init__(self, DBSession): self.scope = None self._DBSession = DBSession def __enter__(self): import transaction from zope.sqlalchemy.datamanager import join_transaction from sqlalchemy.orm.scoping import ScopedRegistry self._original_registry = self._DBSession.registry self._DBSession.registry = ScopedRegistry( self._DBSession.session_factory, self._get_scope) self.scope = self txn = transaction.begin() session = self._DBSession() join_transaction(session) transaction.manager.free(txn) return self def __exit__(self, exc_type, exc_value, traceback): self._DBSession.registry = self._original_registry self.scope = None def _get_scope(self): return self.scope @contextmanager def change(self, scope=None): previous = self.scope self.scope = scope yield scope self.scope = previous def main(): import argparse parser = argparse.ArgumentParser( description="Update links and keys", epilog=EPILOG, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument('--app-name', help="Pyramid app name in configfile") parser.add_argument('--item-type', action='append', help="Item type") parser.add_argument('config_uri', help="path to configfile") args = parser.parse_args() logging.basicConfig() testapp = internal_app(args.config_uri, args.app_name) # Loading app will have configured from config file. Reconfigure here: logging.getLogger('encoded').setLevel(logging.DEBUG) run(testapp, args.item_type) if __name__ == '__main__': main()
Python
0
@@ -351,16 +351,31 @@ ONS = %5B%0A + 'library',%0A %5D%0A%0A%0Adef
f93ce7ca0c73946e1997572576b28436d53ef970
Update to V2 API
salt/modules/opsgenie.py
salt/modules/opsgenie.py
# -*- coding: utf-8 -*-
'''
Module for sending data to OpsGenie

.. versionadded:: 2018.3.0

:configuration: This module can be used in Reactor System for
    posting data to OpsGenie as a remote-execution function.

    For example:

    .. code-block:: yaml

        opsgenie_event_poster:
          local.opsgenie.post_data:
            - tgt: 'salt-minion'
            - kwarg:
                name: event.reactor
                api_key: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
                reason: {{ data['data']['reason'] }}
                action_type: Create
'''

# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
import requests

# Import Salt libs
import salt.exceptions
import salt.utils.json

API_ENDPOINT = "https://api.opsgenie.com/v1/json/saltstack?apiKey="

log = logging.getLogger(__name__)


def post_data(api_key=None, name='OpsGenie Execution Module', reason=None,
              action_type=None):
    '''
    Post data to OpsGenie. It's designed for Salt's Event Reactor.

    After configuring the sls reaction file as shown above, you can trigger
    the module with your designated tag (og-tag in this case).

    CLI Example:

    .. code-block:: bash

        salt-call event.send 'og-tag' '{"reason" : "Overheating CPU!"}'

    Required parameters:

    api_key
        It's the API Key you've copied while adding integration in OpsGenie.

    reason
        It will be used as alert's default message in OpsGenie.

    action_type
        OpsGenie supports the default values Create/Close for action_type.
        You can customize this field with OpsGenie's custom actions for
        other purposes like adding notes or acknowledging alerts.

    Optional parameters:

    name
        It will be used as alert's alias. If you want to use the close
        functionality you must provide name field for both states like
        in this case.
    '''
    if api_key is None or reason is None or action_type is None:
        raise salt.exceptions.SaltInvocationError(
            'API Key or Reason or Action Type cannot be None.')

    data = dict()
    data['name'] = name
    data['reason'] = reason
    data['actionType'] = action_type
    data['cpuModel'] = __grains__['cpu_model']
    data['cpuArch'] = __grains__['cpuarch']
    data['fqdn'] = __grains__['fqdn']
    data['host'] = __grains__['host']
    data['id'] = __grains__['id']
    data['kernel'] = __grains__['kernel']
    data['kernelRelease'] = __grains__['kernelrelease']
    data['master'] = __grains__['master']
    data['os'] = __grains__['os']
    data['saltPath'] = __grains__['saltpath']
    data['saltVersion'] = __grains__['saltversion']
    data['username'] = __grains__['username']
    data['uuid'] = __grains__['uuid']

    log.debug('Below data will be posted:\n%s', data)
    log.debug('API Key: %s \t API Endpoint: %s', api_key, API_ENDPOINT)

    response = requests.post(
        url=API_ENDPOINT + api_key,
        data=salt.utils.json.dumps(data),
        headers={'Content-Type': 'application/json'})

    return response.status_code, response.text
Python
0
@@ -804,32 +804,16 @@ om/v -1/json/saltstack?apiKey= +2/alerts %22%0A%0Al @@ -1957,31 +1957,8 @@ is -None or action_type is None @@ -2045,23 +2045,8 @@ son -or Action Type cann @@ -2088,20 +2088,21 @@ data%5B' -name +alias '%5D = nam @@ -2113,22 +2113,23 @@ data%5B' -reason +message '%5D = rea @@ -2131,24 +2131,26 @@ = reason%0A + # data%5B'actio @@ -2150,20 +2150,17 @@ %5B'action -Type +s '%5D = act @@ -2853,16 +2853,52 @@ POINT)%0A%0A + if action_type == %22Create%22:%0A resp @@ -2927,16 +2927,20 @@ + url=API_ @@ -2951,20 +2951,294 @@ OINT - + api_key,%0A +,%0A data=salt.utils.json.dumps(data),%0A headers=%7B'Content-Type': 'application/json',%0A 'Authorization': 'GenieKey ' + api_key%7D)%0A else:%0A response = requests.post(%0A url=API_ENDPOINT + %22/%22 + name + %22/close?identifierType=alias%22,%0A @@ -3275,32 +3275,36 @@ (data),%0A + + headers=%7B'Conten @@ -3330,19 +3330,81 @@ on/json' +,%0A 'Authorization': 'GenieKey ' + api_key %7D)%0A +%0A retu
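Decoded, the diff moves the module to the V2 alerts API: the endpoint constant changes, the payload keys become alias/message (with actionType commented out as actions), and the request carries a GenieKey Authorization header, with closes addressed by alias. Reconstructed from the diff (whitespace approximate):

API_ENDPOINT = "https://api.opsgenie.com/v2/alerts"

# Inside post_data(), after the payload is assembled:
if action_type == "Create":
    response = requests.post(
        url=API_ENDPOINT,
        data=salt.utils.json.dumps(data),
        headers={'Content-Type': 'application/json',
                 'Authorization': 'GenieKey ' + api_key})
else:
    response = requests.post(
        url=API_ENDPOINT + "/" + name + "/close?identifierType=alias",
        data=salt.utils.json.dumps(data),
        headers={'Content-Type': 'application/json',
                 'Authorization': 'GenieKey ' + api_key})

return response.status_code, response.text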
0fceb297dc4855cd5617daaf9821fb3a332c19ed
Fix descriptions
mediacrush/slimdown.py
mediacrush/slimdown.py
from functools import partial

from markdown import Markdown, odict
from markdown.blockprocessors import build_block_parser
from markdown.preprocessors import build_preprocessors
from markdown.inlinepatterns import build_inlinepatterns
from markdown.treeprocessors import build_treeprocessors

slimdown = Markdown(safe_mode="escape")

# Remove some block parsers
block = build_block_parser(slimdown)
del block.blockprocessors["hashheader"]
del block.blockprocessors["setextheader"]
del block.blockprocessors["olist"]
del block.blockprocessors["ulist"]
slimdown.parser = block

# Delete most inline patterns
inline = build_inlinepatterns(slimdown)
del inline["backtick"]
del inline["reference"]
del inline["image_link"]
del inline["image_reference"]
del inline["short_reference"]
del inline["autolink"]
del inline["automail"]
del inline["entity"]
slimdown.inlinePatterns = inline
Python
0.00079
@@ -872,8 +872,205 @@ inline%0A +%0A# Monkey-patch unicode fix%0Aslimdown._convert = slimdown.convert%0Adef slimdown_convert(text):%0A text = text.decode('utf-8')%0A%0A return slimdown._convert(text)%0Aslimdown.convert = slimdown_convert%0A
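The diff simply appends a unicode monkey-patch to the module; decoded verbatim it reads:

# Monkey-patch unicode fix
slimdown._convert = slimdown.convert
def slimdown_convert(text):
    text = text.decode('utf-8')

    return slimdown._convert(text)
slimdown.convert = slimdown_convert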
2871d4d45c70cb3619bbf12ed77a8a94e038702e
Set default language to Finnish should the mail be sent to admins
ckanext/ytp/request/mail.py
ckanext/ytp/request/mail.py
from ckan.lib.i18n import set_lang, get_lang
from ckan.lib.mailer import mail_user
from pylons import i18n
from ckan.common import _

import logging
log = logging.getLogger(__name__)

_SUBJECT_MEMBERSHIP_REQUEST = lambda: _(
    "New membership request (%(organization)s)")
_MESSAGE_MEMBERSHIP_REQUEST = lambda: _("""\
User %(user)s (%(email)s) has requested membership to organization %(organization)s.

%(link)s

Best regards

Avoindata.fi support
valtori@avoindata.fi
""")

_SUBJECT_MEMBERSHIP_APPROVED = lambda: _(
    "Organization membership approved (%(organization)s)")
_MESSAGE_MEMBERSHIP_APPROVED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been approved.

Best regards

Avoindata.fi support
valtori@avoindata.fi
""")

_SUBJECT_MEMBERSHIP_REJECTED = lambda: _(
    "Organization membership rejected (%(organization)s)")
_MESSAGE_MEMBERSHIP_REJECTED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been rejected.

Best regards

Avoindata.fi support
valtori@avoindata.fi
""")


def mail_new_membership_request(locale, admin, group_name, url, user_name, user_email):
    current_locale = get_lang()

    if locale == 'en':
        _reset_lang()
    else:
        set_lang(locale)

    subject = _SUBJECT_MEMBERSHIP_REQUEST() % {
        'organization': group_name
    }
    message = _MESSAGE_MEMBERSHIP_REQUEST() % {
        'user': user_name, 'email': user_email,
        'organization': group_name, 'link': url
    }
    try:
        mail_user(admin, subject, message)
    except Exception:
        log.exception("Mail could not be sent")
    finally:
        set_lang(current_locale)


def mail_process_status(locale, member_user, approve, group_name, capacity):
    current_locale = get_lang()

    if locale == 'en':
        _reset_lang()
    else:
        set_lang(locale)

    role_name = _(capacity)

    subject_template = _SUBJECT_MEMBERSHIP_APPROVED(
    ) if approve else _SUBJECT_MEMBERSHIP_REJECTED()
    message_template = _MESSAGE_MEMBERSHIP_APPROVED(
    ) if approve else _MESSAGE_MEMBERSHIP_REJECTED()

    subject = subject_template % {
        'organization': group_name
    }
    message = message_template % {
        'role': role_name,
        'organization': group_name
    }

    try:
        mail_user(member_user, subject, message)
    except Exception:
        log.exception("Mail could not be sent")
        # raise MailerException("Mail could not be sent")
    finally:
        set_lang(current_locale)


def _reset_lang():
    try:
        i18n.set_lang(None)
    except TypeError:
        pass
Python
0
@@ -1181,72 +1181,135 @@ -current_locale = get_lang()%0A%0A if locale == 'en':%0A _res +#Mail sent to admins should be sent with default locale, i.e. finnish not the locale defined by the user%0A current_locale = g et_l @@ -1314,34 +1314,56 @@ _lang()%0A +%0A -else:%0A +#Set the locale to default %0A +_re set_lang @@ -1363,22 +1363,16 @@ et_lang( -locale )%0A su
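Applying the diff, the start of mail_new_membership_request no longer honors the caller's locale and always resets to the site default; decoded, the patched opening reads:

def mail_new_membership_request(locale, admin, group_name, url, user_name, user_email):
    #Mail sent to admins should be sent with default locale, i.e. finnish not the locale defined by the user
    current_locale = get_lang()

    #Set the locale to default
    _reset_lang()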
9f26c83b1ba25d8c64a8e9418310f5dd0e6cb9bd
refactor clean
estudios_socioeconomicos/views.py
estudios_socioeconomicos/views.py
import csv
import json
from collections import OrderedDict

from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required, user_passes_test
from django.http import HttpResponse

from rest_framework.response import Response
import django_excel as excel

from administracion.models import Escuela, Colegiatura
from becas.models import Beca
from captura.utils import get_study_info
from captura.models import Retroalimentacion
from perfiles_usuario.utils import is_capturista, is_member, ADMINISTRADOR_GROUP, CAPTURISTA_GROUP
from perfiles_usuario.utils import is_administrador
from familias.models import Integrante, Familia, Comentario, Integrante, Alumno, Tutor
from familias.utils import total_egresos_familia, total_ingresos_familia, total_neto_familia
from indicadores.models import Transaccion, Ingreso, Oficio, Periodo
from .models import Estudio, Foto, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta
from .serializers import EstudioSerializer


@login_required
@user_passes_test(is_administrador)
def download_studies(request):
    """ View for an administrator to make a database dump into an excel sheet.

        Each table will be exported to a page inside the excel document.
    """
    return excel.make_response_from_tables(
        [
            Transaccion, Ingreso, Oficio, Periodo,
            Integrante, Familia, Comentario, Integrante, Alumno, Tutor,
            Estudio, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta,
            Retroalimentacion, Beca, Escuela, Colegiatura
        ],
        'xls',
        file_name="JP2_ESTUDIOS_SOCIOECONOMICOS")


@login_required
@user_passes_test(lambda u: is_member(u, [ADMINISTRADOR_GROUP, CAPTURISTA_GROUP]))
def focus_mode(request, id_estudio):
    """ View to see the detail information about a family and their study.
    """
    context = {}
    estudio = get_object_or_404(Estudio.objects.filter(pk=id_estudio))

    if is_capturista(request.user):
        get_object_or_404(
            Estudio.objects.filter(pk=id_estudio),
            capturista=request.user.capturista)

    integrantes = Integrante.objects.filter(familia=estudio.familia).select_related()
    fotos = Foto.objects.filter(estudio=id_estudio)

    context['estudio'] = estudio
    context['integrantes'] = integrantes
    context['fotos'] = fotos
    context['total_egresos_familia'] = total_egresos_familia(estudio.familia.id)
    context['total_ingresos_familia'] = total_ingresos_familia(estudio.familia.id)
    context['total_neto_familia'] = total_neto_familia(estudio.familia.id)

    transacciones = Transaccion.objects.filter(es_ingreso=True, familia=estudio.familia)
    context['ingresos'] = Ingreso.objects.filter(transaccion__in=transacciones)
    context['egresos'] = Transaccion.objects.filter(es_ingreso=False, familia=estudio.familia)
    context['cuestionario'] = get_study_info(estudio)
    context['status_options'] = Estudio.get_options_status()

    return render(
        request,
        'estudios_socioeconomicos/focus_mode.html',
        context)
Python
0.000278
@@ -1,64 +1,4 @@ -import csv%0Aimport json%0Afrom collections import OrderedDict%0A%0A from @@ -128,90 +128,8 @@ est%0A -from django.http import HttpResponse%0A%0Afrom rest_framework.response import Response %0Aimp @@ -533,36 +533,24 @@ Comentario, - Integrante, Alumno, Tut @@ -812,51 +812,8 @@ sta%0A -from .serializers import EstudioSerializer%0A %0A%0A@l @@ -1115,17 +1115,16 @@ %5B - %0A @@ -1166,17 +1166,16 @@ Periodo, - %0A @@ -1211,28 +1211,16 @@ entario, - Integrante, Alumno, @@ -1442,20 +1442,16 @@ MICOS%22)%0A - %0A%0A@login
dbb538d84d958d964319bf287a7bdc9587c62f15
Use realistic identifiers in strategy
services/tests/services_import_hypothesis.py
services/tests/services_import_hypothesis.py
# from hypothesis import composite
from hypothesis import event
from hypothesis.strategies import (
    text, integers, booleans, lists, composite, uuids,
    sampled_from, none, one_of, permutations, sets)

from string import digits, ascii_letters, punctuation

from django.conf import settings

LANGUAGES = [l[0] for l in settings.LANGUAGES]

RESOURCES = [
    'unit',
    'organization',
    'department',
    'ontologyword',
    'ontologytree',
]


def int_keys(draw):
    return draw(lists(
        integers(min_value=1, max_value=15),
        min_size=1, max_size=3, unique=True))


def uuid_keys(draw):
    return draw(lists(uuids(), min_size=2, max_size=3))


def translated_field(draw, name, allow_missing=True, languages=LANGUAGES):
    result = {}
    for lang in languages:
        if allow_missing:
            val = draw(one_of(text(), none()))
        else:
            val = draw(text(min_size=1))
        if val is not None:
            result['{}_{}'.format(name, lang)] = val
    return result


VIEWPOINTS = ['00', '11', '12', '13', '21', '22', '23',
              '31', '32', '33', '41', '51', '52', '61']
VIEWPOINT_STATES = ['green', 'red', 'unknown']

MUNICIPALITIES = [
    # TODO: read from independent source
    {
        'fi': 'Helsinki',
        'sv': 'Helsingfors',
        'en': 'Helsinki'
    },
    {
        'fi': 'Espoo',
        'sv': 'Esbo',
        'en': 'Espoo'
    },
    {
        'fi': 'Kauniainen',
        'sv': 'Grankulla',
        'en': 'Kauniainen'
    },
    {
        'fi': 'Vantaa',
        'sv': 'Vanda',
        'en': 'Vantaa'
    }
]


def accessibility_viewpoints(draw):
    return ','.join([
        '{}:{}'.format(key, draw(sampled_from(VIEWPOINT_STATES)))
        for key in VIEWPOINTS])


PROVIDER_TYPES = [
    "CONTRACT_SCHOOL",
    "OTHER_PRODUCTION_METHOD",
    "PAYMENT_COMMITMENT",
    "PURCHASED_SERVICE",
    "SELF_PRODUCED",
    "SUPPORTED_OPERATIONS",
    "UNKNOWN_PRODUCTION_METHOD"]

ORGANIZER_TYPES = [
    "ASSOCIATION",
    "FOUNDATION",
    "GOVERNMENT",
    "GOVERNMENTAL_COMPANY",
    "JOINT_MUNICIPAL_AUTHORITY",
    "MUNICIPAL_ENTERPRISE_GROUP",
    "MUNICIPALITY",
    "MUNICIPALLY_OWNED_COMPANY",
    "ORGANIZATION",
    "OTHER_REGIONAL_COOPERATION_ORGANIZATION",
    "PRIVATE_ENTERPRISE",
    "UNKNOWN"
]


@composite
def make_source(draw):
    return {'id': draw(text(min_size=1)),
            'source': draw(text(min_size=1))}


def unit_maker(draw, resource_ids):

    def make_unit(uid):
        # Required fields
        result = {
            'id': uid,
            'accessibility_viewpoints': accessibility_viewpoints(draw),
            'dept_id': str(draw(sampled_from(resource_ids['department']))),
            'org_id': str(draw(sampled_from(resource_ids['organization']))),
            'ontologyword_ids': draw(permutations(resource_ids['ontologyword'])),
            'ontologytree_ids': draw(permutations(resource_ids['ontologytree'])),
            'provider_type': draw(sampled_from(PROVIDER_TYPES)),
            'organizer_type': draw(sampled_from(ORGANIZER_TYPES)),
            'manual_coordinates': draw(booleans()),
            # TODO: cannot test is_public=False until there is a mechanism
            # for getting non-public units from the API.
            'is_public': True,  # TODO: map to another field
        }
        result.update(translated_field(draw, 'name', allow_missing=False))

        def add_optional_field(name, strategy):
            val = draw(one_of(none(), strategy))
            if val is not None:
                event('unit.{}: optional field given value'.format(name))
                result[name] = val
            else:
                event('unit.{}: optional field missing'.format(name))

        def add_optional_text_field(name):
            add_optional_field(name, text())

        add_optional_field('address_city', sampled_from(MUNICIPALITIES))
        add_optional_field('address_zip', text(max_size=10))
        add_optional_field('organizer_business_id', text(max_size=10))

        for field in ['accessibility_email', 'accessibility_www',
                      'data_source_url', 'email', 'fax',
                      'phone', 'picture_entrance_url', 'picture_url',
                      'streetview_entrance_url']:
            add_optional_text_field(field)

        result.update(translated_field(draw, 'address_postal_full', allow_missing=True))
        result.update(translated_field(draw, 'call_charge_info', allow_missing=True))
        result.update(translated_field(draw, 'desc', allow_missing=True))
        result.update(translated_field(draw, 'picture_caption', allow_missing=True))

        # Extra searchwords
        for lang in LANGUAGES:
            words = draw(sets(text(digits + ascii_letters + punctuation + 'åäöÅÄÖ ',
                                   min_size=1, max_size=25)))
            if len(words) == 0:
                event('extra searchwords empty')
                words = None
            else:
                words = ', '.join(words)
            result['extra_searchwords_{}'.format(lang)] = words

        add_optional_field('sources', lists(make_source(), min_size=1, max_size=2))

        return result
    return make_unit


def organization_maker(*args):
    return lambda x: {'id': str(x)}


def department_maker(draw, resource_ids):
    def make_department(did):
        return {
            'id': str(did),
            'hierarchy_level': 0,
            'org_id': str(draw(sampled_from(resource_ids['organization'])))
        }
    return make_department


def ontologyword_maker(*args):
    return lambda x: {'id': x}


def ontologytree_maker(*args):
    return lambda x: {'id': x}


make_resource = {}
for r in RESOURCES:
    make_resource[r] = locals()['{}_maker'.format(r)]


@composite
def closed_object_set(draw):
    ids = {
        'unit': int_keys(draw),
        'organization': uuid_keys(draw),
        'department': uuid_keys(draw),
        'ontologyword': int_keys(draw),
        'ontologytree': int_keys(draw)
    }
    resources = {}
    for key, identifiers in ids.items():
        resources[key] = list(map(make_resource[key](draw, ids), ids[key]))
    return resources
Python
0.000109
@@ -289,16 +289,69 @@ ttings%0A%0A +SAFE_LETTERS = digits + ascii_letters + punctuation%0A%0A LANGUAGE @@ -2377,32 +2377,46 @@ 'id': draw(text( +SAFE_LETTERS, min_size=1)), 's @@ -2425,32 +2425,46 @@ rce': draw(text( +SAFE_LETTERS, min_size=1))%7D%0A%0A%0A @@ -4907,44 +4907,20 @@ ext( -digits + ascii_letters + punctuation +SAFE_LETTERS + '
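Decoded, the diff introduces a SAFE_LETTERS alphabet and threads it through the text strategies so generated identifiers stay printable; reconstructed fragments (locator comments are mine, whitespace approximate):

SAFE_LETTERS = digits + ascii_letters + punctuation

# In make_source():
return {'id': draw(text(SAFE_LETTERS, min_size=1)),
        'source': draw(text(SAFE_LETTERS, min_size=1))}

# In the extra-searchwords loop:
words = draw(sets(text(SAFE_LETTERS + 'åäöÅÄÖ ',
                       min_size=1, max_size=25)))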
a25560008f6d83e1918f7f143bc40a6e44db7924
Set the initial x-axes limits to be something reasonable for the problem.
examples/models/transform_jitter.py
examples/models/transform_jitter.py
import numpy as np

from bokeh.io import vplot, hplot
from bokeh.plotting import figure, show, output_file
from bokeh.models.sources import ColumnDataSource
from bokeh.models import Slider, CustomJS, Dropdown, Toggle, Paragraph
from bokeh.models.transforms import Jitter

N = 50

source = ColumnDataSource(data=dict(
    x=[1]*N + [2]*N,
    xp=[1]*N + [2]*N,
    xplot=[1]*N + [2]*N,
    col=['#ab324b']*N + ['#0022aa']*N,
    y=np.random.random(2*N)*10))

jitter = Jitter(mean=0, width=0)

p = figure(x_range=(-10,10), y_range=(0,10))
scatter_obj = p.scatter(x='xplot', y='y', color='col', source=source,
                        size = 10, alpha=0.5)

enable_callback=CustomJS(args=dict(scatter_obj=scatter_obj, source=source,
                                   figure=p, jitter=jitter), code="""
    if(button.get('active') == true) {
        var data=source.get('data')
        for (i=0; i < data['y'].length; i++) {
            data['xplot'][i] = data['xp'][i]
        }
    }
    else {
        var data=source.get('data')
        for (i=0; i < data['y'].length; i++) {
            data['xplot'][i] = data['x'][i]
        }
    }
    source.trigger('change')
    figure.trigger('change')
""")

width_callback=CustomJS(args=dict(jitter=jitter, source=source, figure=p), code="""
    jitter.set('width', slider.get('value'))

    data=source.get('data')
    for (i=0; i < data['y'].length; i++) {
        data['xp'][i] = jitter.compute(data['x'][i])
    }

    if(button.get('active') == true) {
        var data=source.get('data')
        for (i=0; i < data['y'].length; i++) {
            data['xplot'][i] = data['xp'][i]
        }
    }

    source.trigger('change')
    figure.trigger('change')
""")

center_callback=CustomJS(args=dict(jitter=jitter, source=source, figure=p), code="""
    jitter.set('mean', slider.get('value'))

    data=source.get('data')
    for (i=0; i < data['y'].length; i++) {
        data['xp'][i] = jitter.compute(data['x'][i])
    }

    if(button.get('active') == true) {
        var data=source.get('data')
        for (i=0; i < data['y'].length; i++) {
            data['xplot'][i] = data['xp'][i]
        }
    }

    source.trigger('change')
    figure.trigger('change')
""")

distribution_callback=CustomJS(args=dict(jitter=jitter, source=source, figure=p), code="""
    jitter.set('distribution', menu.get('value'))

    data=source.get('data')
    for (i=0; i < data['y'].length; i++) {
        data['xp'][i] = jitter.compute(data['x'][i])
    }

    if(button.get('active') == true) {
        var data=source.get('data')
        for (i=0; i < data['y'].length; i++) {
            data['xplot'][i] = data['xp'][i]
        }
    }

    source.trigger('change')
    figure.trigger('change')
""")

enable_button = Toggle(label='Enable Jitter', type='success', callback=enable_callback)
enable_callback.args['button'] = enable_button

width_slider = Slider(start=0, end=2, value=0, step=0.01, title='Width',
                      callback=width_callback, callback_policy='continuous')
width_callback.args['slider'] = width_slider
width_callback.args['button'] = enable_button

center_slider = Slider(start=-1, end=1, value=0, step=0.01, title='Center',
                       callback=center_callback, callback_policy='continuous')
center_callback.args['slider'] = center_slider
center_callback.args['button'] = enable_button

distribution_dropdown = Dropdown(label='Distribution', type='success',
                                 menu=[('Uniform', 'uniform'), ('Normal', 'normal')],
                                 callback=distribution_callback)
distribution_callback.args['menu'] = distribution_dropdown
distribution_callback.args['button'] = enable_button

title = Paragraph(text='Jitter Parameters')
spacer = Paragraph(text='  ')

output_file("transform_jitter.html", title="Example Transforms")

show(hplot(p, vplot(enable_button, spacer, title, spacer,
                    center_slider, width_slider, distribution_dropdown)))
Python
0
@@ -487,14 +487,12 @@ ge=( --10,10 +0, 3 ), y
a23a1050501563889c2806a514fe2994a2ebe3a8
Add python3 support in example
example/consume_many_csv_files.py
example/consume_many_csv_files.py
from __future__ import print_function
from itertools import chain
from itertools import imap

import karld
from karld.path import i_walk_csv_paths


def main():
    """
    Consume many csv files as if one.
    """
    import pathlib

    input_dir = pathlib.Path('test_data/things_kinds')

    # # Use a generator expression
    # iterables = (karld.io.i_get_csv_data(data_path)
    #              for data_path in i_walk_csv_paths(str(input_dir)))
    # # or a generator map.
    iterables = imap(karld.io.i_get_csv_data,
                     i_walk_csv_paths(str(input_dir)))

    items = chain.from_iterable(iterables)

    for item in items:
        print(item[0], item[1])


if __name__ == "__main__":
    main()
Python
0.000001
@@ -59,16 +59,26 @@ t chain%0A +%0Atry:%0A from ite @@ -95,16 +95,69 @@ ort imap +%0Aexcept ImportError:%0A # if python 3%0A imap = map %0A%0Aimport
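Decoded, the diff guards the imap import so the script also runs on Python 3, where itertools.imap no longer exists and the builtin map is lazy:

try:
    from itertools import imap
except ImportError:
    # if python 3
    imap = map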
d27c34c65198280e324c37acda7f33ece07c2c92
make text field usage consistent
examples/plotting/server/markers.py
examples/plotting/server/markers.py
# The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot

from numpy.random import random

from bokeh.plotting import *

def mscatter(p, x, y, typestr):
    p.scatter(x, y, marker=typestr,
              line_color="#6666ee", fill_color="#ee6666",
              fill_alpha=0.5, size=12)

def mtext(p, x, y, textstr):
    p.text(x, y, text=textstr,
           text_color="#449944", text_align="center", text_font_size="10pt")

output_server("markers")

p = figure(title="markers.py example")
N = 10

mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")

mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")

mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")

mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")

mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")

mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")

show(p)  # open a browser
Python
0.000004
@@ -346,23 +346,25 @@ y, text= +%5B textstr +%5D ,%0A
a6b8b040aab2fd8fb79c027e9f098d6748c6d52a
remove unique constraint here
dishpub/dishpubdb.py
dishpub/dishpubdb.py
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, Boolean, DateTime
from sqlalchemy.orm import mapper
from sqlalchemy import ForeignKey
from sqlalchemy.orm import backref

try:
    from sqlalchemy.orm import relationship
except:
    from sqlalchemy.orm import relation as relationship

from sqlalchemy import Sequence
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
import datetime
from sqlalchemy.schema import UniqueConstraint

Base = declarative_base()

class Endorser(Base):
    __tablename__ = 'endorser'
    id = Column(Integer, primary_key=True)
    subject = Column(String(100),nullable = False,unique=True)
    def __init__(self,subject):
        self.subject = subject
    def __repr__(self):
        return "<Endorser('%s')>" % (self.subject)

class EndorserMetadata(Base):
    __tablename__ = 'endorserMetadata'
    id = Column(Integer, primary_key=True)
    fkEndorser = Column(Integer, ForeignKey(Endorser.id, onupdate="CASCADE", ondelete="CASCADE"))
    key = Column(String(200),nullable = False,unique=True)
    value = Column(String(200),nullable = False)
    # explicit/composite unique constraint. 'name' is optional.
    UniqueConstraint('fkEndorser', 'key')
    def __init__(self,imagelist,key,value):
        self.fkEndorser = imagelist
        self.key = key
        self.value = value
    def __repr__(self):
        return "<EndorserMetadata('%s','%s', '%s')>" % (self.fkEndorser, self.key, self.value)

class Imagelist(Base):
    __tablename__ = 'imagelist'
    id = Column(Integer, primary_key=True)
    identifier = Column(String(50),nullable = False,unique=True)
    # Line would be but for inconsistency
    #imagelist_latest =Column(Integer, ForeignKey('imagelist.id'))
    orm_metadata = relationship("ImagelistMetadata", backref="Imagelist",cascade='all, delete')
    # The date of the last update to the subscription.
    # This is different from the creation time of the image list.
    # It is provided only for instrumentation purposes.
    updated = Column(DateTime)
    def __init__(self,identifier):
        self.identifier = identifier
    def __repr__(self):
        return "<Imagelist('%s')>" % (self.identifier)

class ImagelistMetadata(Base):
    __tablename__ = 'ImagelistMetadata'
    id = Column(Integer, primary_key=True)
    fkImageList = Column(Integer, ForeignKey(Imagelist.id, onupdate="CASCADE", ondelete="CASCADE"))
    key = Column(String(200),nullable = False,unique=True)
    value = Column(String(200),nullable = False)
    UniqueConstraint('fkImageList', 'key')
    def __init__(self,imagelist,key,value):
        self.fkImageList = imagelist
        self.key = key
        self.value = value
    def __repr__(self):
        return "<ImagelistMetadata('%s','%s', '%s')>" % (self.fkImageList, self.key, self.value)

class Image(Base):
    __tablename__ = 'Image'
    id = Column(Integer, primary_key=True)
    identifier = Column(String(50),nullable = False,unique=True)
    fkImageList = Column(Integer, ForeignKey(Imagelist.id, onupdate="CASCADE", ondelete="CASCADE"))
    def __init__(self,imagelist,identifier):
        self.fkImageList = imagelist
        self.identifier = identifier
    def __repr__(self):
        return "<Image('%s','%s', '%s')>" % (self.fkImageList, self.key, self.value)

class ImageMetadata(Base):
    __tablename__ = 'ImageMetadata'
    id = Column(Integer, primary_key=True)
    fkImage = Column(Integer, ForeignKey(Image.id, onupdate="CASCADE", ondelete="CASCADE"))
    key = Column(String(200),nullable = False)
    value = Column(String(200),nullable = False)
    def __init__(self,image,key,value):
        self.fkImage = image
        self.key = key
        self.value = value
    def __repr__(self):
        return "<ImageMetadata('%s','%s', '%s')>" % (self.fkImage, self.key, self.value)

class Endorsement(Base):
    __tablename__ = 'Endorsement'
    id = Column(Integer, primary_key=True)
    fkImageList = Column(Integer, ForeignKey(Imagelist.id, onupdate="CASCADE", ondelete="CASCADE"))
    fkEndorser = Column(Integer, ForeignKey(Endorser.id, onupdate="CASCADE", ondelete="CASCADE"))
    def __init__(self,imagelist,endorser):
        self.fkImageList = imagelist
        self.fkEndorser = endorser

def init(engine):
    Base.metadata.create_all(engine)
Python
0.000236
@@ -538,16 +538,111 @@ traint%0A%0A +%0A##########################################%0A# makes key value tables to increase flexibility.%0A%0A Base = d @@ -1210,36 +1210,24 @@ able = False -,unique=True )%0A value @@ -2611,36 +2611,24 @@ able = False -,unique=True )%0A value
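Decoded, the diff adds a banner comment before Base and drops unique=True from the key columns of both metadata tables so the same key can repeat across rows; reconstructed fragments (locator comment is mine):

##########################################
# makes key value tables to increase flexibility.

Base = declarative_base()

# In both EndorserMetadata and ImagelistMetadata:
key = Column(String(200),nullable = False)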
bfdfbe884fbadf04dd42e6e55e3c4ac633005a71
Raise the exception even if we have a logger.
yapf/yapflib/yapf_api.py
yapf/yapflib/yapf_api.py
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entry points for YAPF.

The main APIs that YAPF exposes to drive the reformatting.

  FormatFile(): reformat a file.
  FormatCode(): reformat a string of code.

These APIs have some common arguments:

  style_config: (string) Either a style name or a path to a file that contains
    formatting style settings. If None is specified, use the default style
    as set in style.DEFAULT_STYLE_FACTORY
  lines: (list of tuples of integers) A list of tuples of lines, [start, end],
    that we want to format. The lines are 1-based indexed. It can be used by
    third-party code (e.g., IDEs) when reformatting a snippet of code rather
    than a whole file.
  print_diff: (bool) Instead of returning the reformatted source, return a
    diff that turns the formatted source into reformatted source.
  verify: (bool) True if reformatted code should be verified for syntax.
"""

import difflib
import logging
import re
import sys

from lib2to3.pgen2 import tokenize

from yapf.yapflib import blank_line_calculator
from yapf.yapflib import comment_splicer
from yapf.yapflib import continuation_splicer
from yapf.yapflib import py3compat
from yapf.yapflib import pytree_unwrapper
from yapf.yapflib import pytree_utils
from yapf.yapflib import reformatter
from yapf.yapflib import split_penalty
from yapf.yapflib import style
from yapf.yapflib import subtype_assigner


def FormatFile(filename,
               style_config=None,
               lines=None,
               print_diff=False,
               verify=True):
  """Format a single Python file and return the formatted code.

  Arguments:
    filename: (unicode) The file to reformat.
    remaining arguments: see comment at the top of this module.

  Returns:
    The reformatted code or None if the file doesn't exist.
  """
  _CheckPythonVersion()
  original_source, encoding = ReadFile(filename, logging.warning)
  if original_source is None:
    return None, encoding
  return FormatCode(original_source,
                    style_config=style_config,
                    filename=filename,
                    lines=lines,
                    print_diff=print_diff,
                    verify=verify), encoding


def FormatCode(unformatted_source,
               filename='<unknown>',
               style_config=None,
               lines=None,
               print_diff=False,
               verify=True):
  """Format a string of Python code.

  This provides an alternative entry point to YAPF.

  Arguments:
    unformatted_source: (unicode) The code to format.
    filename: (unicode) The name of the file being reformatted.
    remaining arguments: see comment at the top of this module.

  Returns:
    The code reformatted to conform to the desired formatting style.
  """
  _CheckPythonVersion()
  style.SetGlobalStyle(style.CreateStyleFromConfig(style_config))
  if not unformatted_source.endswith('\n'):
    unformatted_source += '\n'
  tree = pytree_utils.ParseCodeToTree(unformatted_source)

  # Run passes on the tree, modifying it in place.
  comment_splicer.SpliceComments(tree)
  continuation_splicer.SpliceContinuations(tree)
  subtype_assigner.AssignSubtypes(tree)
  split_penalty.ComputeSplitPenalties(tree)
  blank_line_calculator.CalculateBlankLines(tree)

  uwlines = pytree_unwrapper.UnwrapPyTree(tree)
  if not uwlines:
    return ''
  for uwl in uwlines:
    uwl.CalculateFormattingInformation()

  _MarkLinesToFormat(uwlines, lines)
  reformatted_source = reformatter.Reformat(uwlines, verify)
  if unformatted_source == reformatted_source:
    return '' if print_diff else reformatted_source

  code_diff = _GetUnifiedDiff(unformatted_source, reformatted_source,
                              filename=filename)

  if print_diff:
    return code_diff

  return reformatted_source


def _CheckPythonVersion():
  errmsg = 'yapf is only supported for Python 2.7 or 3.4+'
  if sys.version_info[0] == 2:
    if sys.version_info[1] < 7:
      raise RuntimeError(errmsg)
  elif sys.version_info[0] == 3:
    if sys.version_info[1] < 4:
      raise RuntimeError(errmsg)


def ReadFile(filename, logger=None):
  """Read the contents of the file.

  An optional logger can be specified to emit messages to your favorite
  logging stream. If specified, then no exception is raised. This is external
  so that it can be used by third-party applications.

  Arguments:
    filename: (unicode) The name of the file.
    logger: (function) A function or lambda that takes a string and emits it.

  Returns:
    The contents of filename.

  Raises:
    IOError: raised during an error if a logger is not specified.
  """
  try:
    with open(filename, 'rb') as fd:
      encoding = tokenize.detect_encoding(fd.readline)[0]
  except IOError as err:
    if logger:
      logger(err)
    else:
      raise

  try:
    with py3compat.open_with_encoding(filename,
                                      mode='r',
                                      encoding=encoding) as fd:
      source = fd.read()
    return source, encoding
  except IOError as err:
    if logger:
      logger(err)
    else:
      raise


DISABLE_PATTERN = r'^#+ +yapf: *disable$'
ENABLE_PATTERN = r'^#+ +yapf: *enable$'


def _MarkLinesToFormat(uwlines, lines):
  """Skip sections of code that we shouldn't reformat."""
  if lines:
    for uwline in uwlines:
      uwline.disable = True
    for start, end in sorted(lines):
      for uwline in uwlines:
        if uwline.lineno > end:
          break
        if uwline.lineno >= start:
          uwline.disable = False

  index = 0
  while index < len(uwlines):
    uwline = uwlines[index]
    if uwline.is_comment:
      if re.search(DISABLE_PATTERN, uwline.first.value.strip(), re.IGNORECASE):
        while index < len(uwlines):
          uwline = uwlines[index]
          uwline.disable = True
          if (uwline.is_comment and
              re.search(ENABLE_PATTERN, uwline.first.value.strip(),
                        re.IGNORECASE)):
            break
          index += 1
    elif re.search(DISABLE_PATTERN, uwline.last.value.strip(), re.IGNORECASE):
      uwline.disable = True
    index += 1


def _GetUnifiedDiff(before, after, filename='code'):
  """Get a unified diff of the changes.

  Arguments:
    before: (unicode) The original source code.
    after: (unicode) The reformatted source code.
    filename: (unicode) The code's filename.

  Returns:
    The unified diff text.
  """
  before = before.splitlines()
  after = after.splitlines()
  return '\n'.join(difflib.unified_diff(before, after, filename, filename,
                                        '(original)', '(reformatted)',
                                        lineterm='')) + '\n'
Python
0
@@ -5336,36 +5336,24 @@ logger(err)%0A - else:%0A raise%0A%0A @@ -5595,20 +5595,8 @@ rr)%0A - else:%0A
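Per the subject line, the diff drops the else: so the exception propagates even when a logger is supplied; decoded, both except blocks in ReadFile become:

  except IOError as err:
    if logger:
      logger(err)
    raise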
6eac3c570ce9a613071d09f675e65d4abcea95c5
Disable AppAuthentication, requires further testing
mezzanine_api/views.py
mezzanine_api/views.py
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from mezzanine.blog.models import BlogPost as Post, BlogCategory
from mezzanine.pages.models import Page
from rest_framework import viewsets, filters, permissions, mixins
import django_filters
from django_filters.rest_framework import DjangoFilterBackend
from .serializers import UserSerializer, CategorySerializer, PageSerializer, SiteSerializer
from .serializers import PostCreateSerializer, PostUpdateSerializer, PostOutputSerializer
from .permissions import IsAdminOrReadOnly, IsAppAuthenticated
from .pagination import MezzaninePagination, PostPagination
from .mixins import PutUpdateModelMixin

# Supports custom user models
User = get_user_model()


class ListViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
    """
    A viewset that provides only `list` actions.

    To use it, override the class and set the `.queryset`
    and `.serializer_class` attributes.
    """
    pass


class SiteViewSet(ListViewSet):
    """
    For retrieving site title, tagline and domain.
    """
    queryset = Site.objects.all()
    serializer_class = SiteSerializer


class UserFilter(django_filters.FilterSet):
    """
    A class for filtering users.
    """
    username = django_filters.CharFilter(field_name="username", lookup_expr='istartswith')

    class Meta:
        model = User
        fields = ['username']


class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """
    For listing or retrieving users.
    ---
    list:
        parameters:
            - name: username
              type: string
              description: Filter usernames starting with query
              paramType: query
            - name: page
              type: integer
              description: Page number
              paramType: query
    """
    queryset = User.objects.all()
    filterset_class = UserFilter
    filter_backends = (DjangoFilterBackend,)
    serializer_class = UserSerializer
    pagination_class = MezzaninePagination
    permission_classes = (permissions.IsAdminUser,)


class PageFilter(django_filters.FilterSet):
    """
    A class for filtering pages by title.
    """
    title = django_filters.CharFilter(field_name="title")

    class Meta:
        model = Page
        fields = ['title']


class PageViewSet(viewsets.ReadOnlyModelViewSet):
    """
    For listing or retrieving pages.
    ---
    list:
        parameters:
            - name: page
              type: integer
              description: Page number
              paramType: query
    """
    queryset = Page.objects.published()
    serializer_class = PageSerializer
    pagination_class = MezzaninePagination
    filter_backends = (DjangoFilterBackend, filters.OrderingFilter,)
    filterset_class = PageFilter
    ordering_fields = ('id', 'parent', 'title',)
    ordering = ('title',)

    def get_queryset(self):
        queryset = self.queryset
        user = self.request.user
        if user and not user.is_authenticated():
            queryset = queryset.filter(login_required=False)
        return queryset


class CategoryViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, PutUpdateModelMixin,
                      mixins.ListModelMixin, viewsets.GenericViewSet):
    """
    For listing, retrieving, creating or updating blog categories.
    ---
    list:
        parameters:
            - name: search
              type: string
              description: Search for category names that match the query
              paramType: query
            - name: page
              type: integer
              description: Page number
              paramType: query
    """
    queryset = BlogCategory.objects.all()
    serializer_class = CategorySerializer
    pagination_class = MezzaninePagination
    permission_classes = [IsAdminOrReadOnly, IsAppAuthenticated]
    filter_backends = (filters.OrderingFilter, filters.SearchFilter,)
    ordering_fields = ('id', 'title',)
    ordering = ('title',)
    search_fields = ('title',)


class CharInFilter(django_filters.BaseInFilter, django_filters.CharFilter):
    """
    Enable multi-category filtering
    """
    pass


class PostFilter(django_filters.FilterSet):
    """
    A class for filtering blog posts.
    """
    category_id = django_filters.NumberFilter(field_name="categories__id")
    category_name = CharInFilter(field_name="categories__title", lookup_expr='in')
    category_slug = django_filters.CharFilter(field_name="categories__slug", lookup_expr='exact')
    tag = django_filters.CharFilter(field_name='keywords_string', lookup_expr='contains')
    author_id = django_filters.NumberFilter(field_name="user__id")
    author_name = django_filters.CharFilter(field_name="user__username", lookup_expr='istartswith')
    date_min = django_filters.DateFilter(field_name='publish_date', lookup_expr='gte')
    date_max = django_filters.DateFilter(field_name='publish_date', lookup_expr='lte')

    class Meta:
        model = Post
        fields = ['category_id', 'category_name', 'tag', 'author_id', 'author_name',
                  'date_min', 'date_max']


class PostViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, PutUpdateModelMixin,
                  mixins.ListModelMixin, viewsets.GenericViewSet):
    """
    For listing, retrieving, creating or updating blog posts.
    ---
    list:
        parameters:
            - name: category_id
              type: integer
              description: Filter posts by category ID
              paramType: query
            - name: category_name
              type: string
              description: Filter posts by category name
              paramType: query
            - name: category_slug
              type: string
              description: Filter posts by category slug
              paramType: query
            - name: tag
              type: string
              description: Filter posts by tag name
              paramType: query
            - name: author_id
              type: integer
              description: Filter posts by author ID
              paramType: query
            - name: author_name
              type: string
              description: Filter posts by author's username
              paramType: query
            - name: date_min
              type: datetime
              description: Filter posts by minimum publish date
              paramType: query
            - name: date_max
              type: datetime
              description: Filter posts by maximum publish date
              paramType: query
            - name: search
              type: string
              description: Search for blog posts that match the query
              paramType: query
            - name: page
              type: integer
              description: Page number
              paramType: query
    """
    queryset = Post.objects.filter(status=2)
    pagination_class = PostPagination
    permission_classes = [IsAdminOrReadOnly, IsAppAuthenticated]
    filter_backends = (DjangoFilterBackend, filters.OrderingFilter, filters.SearchFilter,)
    filterset_class = PostFilter
    ordering_fields = ('id', 'title', 'publish_date', 'updated', 'user',)
    ordering = ('-publish_date',)
    search_fields = ('title', 'content',)

    def get_serializer_class(self):
        if self.request.method in ('PUT', 'PATCH'):
            return PostUpdateSerializer
        elif self.request.method == 'POST':
            return PostCreateSerializer
        else:
            return PostOutputSerializer
Python
0
@@ -558,32 +558,36 @@ sAdminOrReadOnly + # , IsAppAuthentic @@ -3894,33 +3894,36 @@ sAdminOrReadOnly -, +%5D # IsAppAuthentica @@ -3917,33 +3917,32 @@ AppAuthenticated -%5D %0A filter_back @@ -7076,17 +7076,20 @@ ReadOnly -, +%5D # IsAppAu @@ -7099,17 +7099,16 @@ nticated -%5D %0A fil
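Decoded, the diff disables IsAppAuthenticated by commenting it out of both permission_classes lists; the PostViewSet hunk reconstructs cleanly as the line below, and the CategoryViewSet hunk applies the same change (its exact spacing is ambiguous in the compact diff):

permission_classes = [IsAdminOrReadOnly] # IsAppAuthenticated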
0051b5a5e287057cab06452d4f178e4c04cbd0c5
Put the win_osinfo classes in a helper function
salt/utils/win_osinfo.py
salt/utils/win_osinfo.py
# -*- coding: utf-8 -*-
'''
Get Version information from Windows
'''
# http://stackoverflow.com/questions/32300004/python-ctypes-getting-0-with-getversionex-function
from __future__ import absolute_import

# Import Third Party Libs
import ctypes

try:
    from ctypes.wintypes import BYTE, WORD, DWORD, WCHAR
    HAS_WIN32 = True
except (ImportError, ValueError):
    HAS_WIN32 = False

if HAS_WIN32:
    kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)


# Although utils are often directly imported, it is also possible to use the
# loader.
def __virtual__():
    '''
    Only load if Win32 Libraries are installed
    '''
    if not HAS_WIN32:
        return False, 'This utility requires pywin32'

    return 'win_osinfo'


if HAS_WIN32:
    class OSVERSIONINFO(ctypes.Structure):
        _fields_ = (('dwOSVersionInfoSize', DWORD),
                    ('dwMajorVersion', DWORD),
                    ('dwMinorVersion', DWORD),
                    ('dwBuildNumber', DWORD),
                    ('dwPlatformId', DWORD),
                    ('szCSDVersion', WCHAR * 128))

        def __init__(self, *args, **kwds):
            super(OSVERSIONINFO, self).__init__(*args, **kwds)
            self.dwOSVersionInfoSize = ctypes.sizeof(self)
            kernel32.GetVersionExW(ctypes.byref(self))

    class OSVERSIONINFOEX(OSVERSIONINFO):
        _fields_ = (('wServicePackMajor', WORD),
                    ('wServicePackMinor', WORD),
                    ('wSuiteMask', WORD),
                    ('wProductType', BYTE),
                    ('wReserved', BYTE))

    def errcheck_bool(result, func, args):
        if not result:
            raise ctypes.WinError(ctypes.get_last_error())
        return args

    kernel32.GetVersionExW.errcheck = errcheck_bool
    kernel32.GetVersionExW.argtypes = (ctypes.POINTER(OSVERSIONINFO),)


def get_os_version_info():
    info = OSVERSIONINFOEX()
    ret = {'MajorVersion': info.dwMajorVersion,
           'MinorVersion': info.dwMinorVersion,
           'BuildNumber': info.dwBuildNumber,
           'PlatformID': info.dwPlatformId,
           'ServicePackMajor': info.wServicePackMajor,
           'ServicePackMinor': info.wServicePackMinor,
           'SuiteMask': info.wSuiteMask,
           'ProductType': info.wProductType}

    return ret
Python
0.000002
@@ -733,45 +733,180 @@ '%0A%0A%0A -if HAS_WIN32:%0A +def os_version_info_ex():%0A '''%0A Helper function to return the OSVersionInfo class%0A%0A Returns:%0A + class - OSVERSIONINFO +: The OsVersionInfo class%0A '''%0A class OSVersionInfo (cty @@ -1274,26 +1274,26 @@ uper(OSV -ERSIONINFO +ersionInfo , self). @@ -1427,25 +1427,24 @@ ref(self))%0A%0A -%0A class OS @@ -1448,34 +1448,34 @@ OSV -ERSIONINFOEX(OSVERSIONINFO +ersionInfoEx(OSVersionInfo ):%0A @@ -1703,276 +1703,35 @@ ))%0A%0A -%0A def errcheck_bool(result, func, args):%0A if not result:%0A raise ctypes.WinError(ctypes.get_last_error())%0A return args%0A%0A kernel32.GetVersionExW.errcheck = errcheck_bool%0A kernel32.GetVersionExW.argtypes = (ctypes.POINTER(OSVERSIONINFO), + return OSVersionInfoEx( )%0A%0A%0A @@ -1772,23 +1772,26 @@ o = -OSVERSIONINFOEX +os_version_info_ex ()%0A
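Decoded, the diff replaces the module-level if HAS_WIN32: class block with a helper that builds the renamed classes on demand and returns an instance; a condensed reconstruction follows (the class bodies are unchanged from the originals above, elided here with pass for brevity):

def os_version_info_ex():
    '''
    Helper function to return the OSVersionInfo class

    Returns:
        class: The OsVersionInfo class
    '''
    class OSVersionInfo(ctypes.Structure):
        # _fields_ and __init__ are unchanged from OSVERSIONINFO above
        pass

    class OSVersionInfoEx(OSVersionInfo):
        # _fields_ unchanged from OSVERSIONINFOEX above
        pass

    return OSVersionInfoEx()

# get_os_version_info() then starts with:
info = os_version_info_ex()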
ed9be363029446638da5a1ae4ca6f5d1218615df
Send the keypair to the right place with the right name
saltcloud/clouds/hpcs.py
saltcloud/clouds/hpcs.py
'''
HP Cloud Module
======================

The HP cloud module. This module uses the preferred means to set up a
libcloud based cloud module and should be used as the general template for
setting up additional libcloud based modules.

The HP cloud module interfaces with the HP public cloud service and requires
that two configuration parameters be set for use:

.. code-block:: yaml

    # The HP login user
    HPCLOUD.user: fred
    # The HP user's apikey
    HPCLOUD.apikey: 901d3f579h23c8v73q9
'''

# The import section is mostly libcloud boilerplate

# Import python libs
import os
import sys
import types
import paramiko
import tempfile
import traceback

# Import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.compute.deployment import MultiStepDeployment, ScriptDeployment, SSHKeyDeployment

# Import generic libcloud functions
from saltcloud.libcloudfuncs import *

# Some of the libcloud functions need to be in the same namespace as the
# functions defined in the module, so we create new function objects inside
# this module namespace
avail_images = types.FunctionType(avail_images.__code__, globals())
avail_sizes = types.FunctionType(avail_sizes.__code__, globals())
script = types.FunctionType(script.__code__, globals())
destroy = types.FunctionType(destroy.__code__, globals())
list_nodes = types.FunctionType(list_nodes.__code__, globals())
list_nodes_full = types.FunctionType(list_nodes_full.__code__, globals())


# Only load in this module if the HPCLOUD configurations are in place
def __virtual__():
    '''
    Set up the libcloud functions and check for HPCLOUD configs
    '''
    key_values = [
            'HPCLOUD.user'
            , 'HPCLOUD.apikey'
            , 'HPCLOUD.auth_endpoint'
            , 'HPCLOUD.region'
            , 'HPCLOUD.key_name'
            , 'HPCLOUD.tenant_name'
            ]
    have_values = 0
    for value in key_values:
        if value in __opts__:
            have_values += 1
            print have_values, len(key_values), value
    if have_values == len(key_values):
        return 'hpcs'

    return False


def get_conn():
    '''
    Return a conn object for the passed vm data
    '''
    driver = get_driver(Provider.OPENSTACK)
    return driver(
            __opts__['HPCLOUD.user'],
            __opts__['HPCLOUD.apikey'],
            ex_force_auth_url = __opts__['HPCLOUD.auth_endpoint'],
            ex_force_auth_version = '2.0_password',
            ex_force_service_name = 'Compute',
            ex_force_service_region = __opts__['HPCLOUD.region'],
            ex_key_name = __opts__['HPCLOUD.key_name'],
            ex_tenant_name = __opts__['HPCLOUD.tenant_name']
            )


def create(vm_):
    '''
    Create a single vm from a data dict
    '''
    print('Creating Cloud VM {0}'.format(vm_['name']))
    conn = get_conn()
    deploy_script = script(vm_)
    kwargs = {}
    kwargs['name'] = vm_['name']

    kwargs['image'] = get_image(conn, vm_)
    if not kwargs['image']:
        err = ('Error creating {0} on HPCLOUD\n\n'
               'Could not find image {1}\n').format(
                       vm_['name'], vm_['image']
                       )
        sys.stderr.write(err)
        return False

    kwargs['size'] = get_size(conn, vm_)
    if not kwargs['size']:
        err = ('Error creating {0} on HPCLOUD\n\n'
               'Could not find size {1}\n').format(
                       vm_['name'], vm_['size']
                       )
        sys.stderr.write(err)
        return False

    try:
        data = conn.create_node(**kwargs)
    except Exception as exc:
        err = ('Error creating {0} on HPCLOUD\n\n'
               'The following exception was thrown by libcloud when trying to '
               'run the initial deployment: \n{1}\n').format(
                       vm_['name'], exc
                       )
        sys.stderr.write(err)
        print traceback.format_exc()
        return False

    # NOTE
    # We need to insert a wait / poll until we have
    # public ips for our node. Otherwise, we cannot
    # complete the next step of deploying a script to the new
    # server : (
    if data.public_ips:
        host_addr = data.public_ips[0]
    else:
        host_addr = None

    deployed = saltcloud.utils.deploy_script(
        host=host_addr,
        username='root',
        password=data.extra['password'],
        script=deploy_script.script,
        name=vm_['name'],
        sock_dir=__opts__['sock_dir'])
    if deployed:
        print('Salt installed on {0}'.format(vm_['name']))
    else:
        print('Failed to start Salt on Cloud VM {0}'.format(vm_['name']))

    print('Created Cloud VM {0} with the following values:'.format(
        vm_['name']
        ))
    for key, val in data.__dict__.items():
        print('  {0}: {1}'.format(key, val))
Python
0.000003
@@ -1848,25 +1848,24 @@ 'HPCLOUD.key -_ name'%0A @@ -2618,64 +2618,8 @@ '%5D,%0A - ex_key_name = __opts__%5B'HPCLOUD.key_name'%5D,%0A @@ -3513,24 +3513,79 @@ eturn False%0A + kwargs%5B'ex_keyname'%5D = __opts__%5B'HPCLOUD.keyname'%5D%0A try:%0A
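Decoded from the escaped diff above (%0A is a newline, %5B/%5D are square brackets), the commit renames the config key from HPCLOUD.key_name to HPCLOUD.keyname, drops the driver-wide ex_key_name argument from get_conn(), and instead passes the keypair per node inside create(). A sketch of the resulting create() hunk, with indentation inferred from the surrounding file:

    # Keypair is now selected per node via libcloud's ex_keyname kwarg
    # (the HPCLOUD.keyname config key replaces HPCLOUD.key_name)
    kwargs['ex_keyname'] = __opts__['HPCLOUD.keyname']
    try:
        data = conn.create_node(**kwargs)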
be29826ded5f20f56a7996464a186ccc3f68c0d0
Switch the default backend from amqp:// (deprecated) to rpc://
openquake/engine/celeryconfig.py
openquake/engine/celeryconfig.py
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

"""
Config for all installed OpenQuake binaries and modules.
Should be installed by setup.py into /etc/openquake
eventually.
"""
import os
import sys

if '--with-doctest' in sys.argv:  # horrible hack for nosetests
    pass  # don't set OQ_DISTRIBUTE
else:
    os.environ["OQ_DISTRIBUTE"] = "celery"

# just in case you are using oq-engine from sources
# with the rest of oq libraries installed into the system (or a
# virtual environment) you must set this environment variable
if os.environ.get("OQ_ENGINE_USE_SRCDIR"):
    sys.modules['openquake'].__dict__["__path__"].insert(
        0, os.path.join(os.path.dirname(__file__), "openquake"))

from openquake.engine import config

config.abort_if_no_config_available()

amqp = config.get_section("amqp")

# RabbitMQ broker (default)
BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \
    amqp

# Redis broker (works only on Trusty)
# BROKER_URL = 'redis://%(host)s:6379/0' % amqp

# BROKER_POOL_LIMIT enables a connections pool so Celery can reuse
# a single connection to RabbitMQ. Value 10 is the default from
# Celery 2.5 where this feature is enabled by default.
# Actually disabled because it's not stable in production.
# See https://bugs.launchpad.net/oq-engine/+bug/1250402
BROKER_POOL_LIMIT = None

# RabbitMQ result backend (default)
CELERY_RESULT_BACKEND = 'amqp://'

# Redis result backend (works only on Trusty)
# CELERY_RESULT_BACKEND = 'redis://%(host)s:6379/0' % amqp

# CELERY_ACKS_LATE and CELERYD_PREFETCH_MULTIPLIER settings help evenly
# distribute tasks across the cluster. This configuration is intended to
# make worker processes reserve only a single task at any given time.
# (The default settings for prefetching define that each worker process will
# reserve 4 tasks at once. For long running calculations with lots of long,
# heavy tasks, this greedy prefetching is not recommended and can result in
# performance issues with respect to cluster utilization.)
# CELERY_MAX_CACHED_RESULTS disables the cache on the results: this means
# that map_reduce will not leak memory by keeping the intermediate results
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_MAX_CACHED_RESULTS = 1

CELERY_ACCEPT_CONTENT = ['pickle', 'json']

CELERY_IMPORTS = ["openquake.commonlib.parallel"]
Python
0
@@ -2058,24 +2058,20 @@ None%0A%0A# -Rabbit +A MQ +P result @@ -2117,16 +2117,48 @@ = ' -amqp://' +rpc://'%0ACELERY_RESULT_PERSISTENT = False %0A%0A#
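Decoded, the diff rewords the comment from "RabbitMQ result backend" to "AMQP result backend" and swaps the deprecated amqp:// result backend for rpc://, additionally disabling result persistence. The resulting lines read roughly:

# AMQP result backend (default)
CELERY_RESULT_BACKEND = 'rpc://'
CELERY_RESULT_PERSISTENT = False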
ee8581536ec36e0e78bf5461fb4dc0365f454943
Revert redirection to /dev/null (not needed anymore)
saltcontainers/models.py
saltcontainers/models.py
import re
import json
import yaml
import tarfile
import logging
import subprocess
from utils import retry, load_json


logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


class ContainerModel(dict):

    @retry()
    def run(self, command, stream=False):
        return self['config']['client'].run(
            self['config']['name'], command, stream=stream)

    def get_suse_release(self):
        info = dict()
        content = self.run('cat /etc/SuSE-release')
        for line in content.split('\n'):
            match = re.match('([a-zA-Z]+)\s*=\s*(\d+)', line)
            if match:
                info.update([[match.group(1), int(match.group(2))]])
        return info

    def get_os_release(self):
        content = self.run('cat /etc/os-release')
        return dict(
            filter(
                lambda it: len(it) == 2,
                [it.replace('"', '').strip().split('=')
                 for it in content.split('\n')]
            )
        )

    def connect(self):
        for item in self['config']['networking_config']['EndpointsConfig'].keys():
            self['config']['client'].connect_container_to_network(
                self['config']['name'], item)

    def disconnect(self):
        for item in self['config']['networking_config']['EndpointsConfig'].keys():
            self['config']['client'].disconnect_container_from_network(
                self['config']['name'], item)

    def remove(self):
        self['config']['client'].stop(self['config']['name'])
        self['config']['client'].remove_container(
            self['config']['name'], v=True)


class BaseModel(dict):

    def salt_call(self, salt_command, *args):
        command = "salt-call {0} {1} --output=json -l quiet 2>/dev/null".format(
            salt_command, ' '.join(args)
        )
        raw = self['container'].run(command)
        try:
            out = json.loads(raw)
        except ValueError:
            raise Exception(raw)
        return out['local']

    def start(self):
        self['container'].run(self['cmd'])


class MasterModel(BaseModel):

    def salt_key_raw(self, *args):
        command = ['salt-key']
        command.extend(args)
        command.append('--output=json')
        return self['container'].run(' '.join(command))

    def salt_key(self, *args):
        return json.loads(self.salt_key_raw(*args))

    def salt_key_accept(self, minion_id):
        return self.salt_key_raw('-a', minion_id, '-y')

    def salt(self, minion_id, salt_command, *args):
        command = "salt {0} {1} --output=json -l quiet 2>/dev/null".format(
            minion_id, salt_command, ' '.join(args))
        data = self['container'].run(command)
        return load_json(data)

    def salt_run(self, command, *args):
        docker_command = "salt-run {0} {1} --output=json -l quiet 2>/dev/null".format(
            command, ' '.join(args))
        data = self['container'].run(docker_command)
        return load_json(data)

    def salt_ssh(self, target, cmd):
        roster = self['container']['config']['salt_config']['roster']
        target_id = target['config']['name']
        SSH = "salt-ssh -l quiet -i --out json --key-deploy --passwd {0} {1} {{0}} 2>/dev/null".format(
            target['ssh_config']['password'], target_id)
        data = self['container'].run(SSH.format(cmd))
        return load_json(data)[target_id]

    def update_roster(self):
        roster = self['container']['config']['salt_config']['root'] / 'roster'
        content = {}
        for item in self['container']['config']['salt_config']['roster']:
            content[item['config']['name']] = {
                "host": item["ip"],
                "user": item['ssh_config']['user'],
                "password": item['ssh_config']['password']
            }
        roster.write(yaml.safe_dump(content, default_flow_style=False))
        self['container']['config']['client'].copy_to(
            self, roster.strpath, '/etc/salt/')


class MinionModel(BaseModel):

    def stop(self):
        self['container'].run('pkill salt-minion')
Python
0
@@ -1685,36 +1685,24 @@ son -l quiet - 2%3E/dev/null %22.format(%0A @@ -2505,36 +2505,24 @@ son -l quiet - 2%3E/dev/null %22.format(%0A @@ -2755,28 +2755,16 @@ -l quiet - 2%3E/dev/null %22.format @@ -3125,20 +3125,8 @@ %7B0%7D%7D - 2%3E/dev/null %22.fo
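Decoded, the diff simply deletes the " 2>/dev/null" suffix from the four shell command templates (salt-call, salt, salt-run and salt-ssh). For example, the first template becomes roughly:

        # stderr is no longer discarded, so errors surface in the test output
        command = "salt-call {0} {1} --output=json -l quiet".format(
            salt_command, ' '.join(args)
        )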
c2f7b71276fbf103aabf008d2ab2cacacfa3e9e7
Remove debug output
src/gmail_launcher.py
src/gmail_launcher.py
import argparse
import os
import sys

import httplib2
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets, OAuth2Credentials
from oauth2client.tools import run
from workflow import Workflow, PasswordNotFound

import config


def execute(wf):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--mark-as-read', dest='mark_as_read', action='store_true',
        default=None)
    parser.add_argument(
        '--archive-conversation', dest='archive_conversation',
        action='store_true', default=None)
    parser.add_argument(
        '--trash-mail', dest='trash_message', action='store_true',
        default=None)
    parser.add_argument(
        '--trash-conversation', dest='trash_conversation',
        action='store_true', default=None)
    parser.add_argument(
        '--deauthorize', dest='deauthorize', action='store_true',
        default=None)
    parser.add_argument('query', nargs='?', default=None)
    args = parser.parse_args(wf.args)

    # Start the OAuth flow to retrieve credentials
    flow = flow_from_clientsecrets(
        config.CLIENT_SECRET_FILE, scope=config.OAUTH_SCOPE)
    http = httplib2.Http()

    try:
        credentials = OAuth2Credentials.from_json(
            wf.get_password('gmail_credentials'))
        if credentials is None or credentials.invalid:
            credentials = run(flow, PseudoStorage(), http=http)
            wf.save_password('gmail_credentials', credentials.to_json())
        # Authorize the httplib2.Http object with our credentials
        http = credentials.authorize(http)
        # Build the Gmail service from discovery
        service = build('gmail', 'v1', http=http)
    except PasswordNotFound:
        wf.logger.error('Credentials not found')

    if args.query is not None:
        query = args.query.split()

    if args.deauthorize:
        wf.delete_password('gmail_credentials')
        print "Workflow deauthorized."
        return 0

    if len(query) < 2:
        return 0

    thread_id = query[0]
    message_id = query[1]

    if args.mark_as_read:
        print mark_conversation_as_read(thread_id, service)
        return 0
    elif args.archive_conversation:
        print archive_conversation(thread_id, service)
        return 0
    elif args.trash_message:
        print trash_message(message_id, service)
        return 0
    elif args.trash_conversation:
        print trash_conversation(thread_id, service)
        return 0
    else:
        open_message(wf, message_id)
        return 0


def open_message(wf, message_id):
    url = 'https://mail.google.com/mail/u/0/?ui=2&pli=1#inbox/%s' % message_id
    wf.logger.debug(url)
    os.system('open "%s"' % url)


def mark_conversation_as_read(thread_id, service):
    try:
        # Mark conversation as read
        thread = service.users().threads().modify(
            userId='me', id=thread_id,
            body={'removeLabelIds': ['UNREAD']}).execute()
        if all(u'labelIds' in message and u'UNREAD' not in message['labelIds']
               for message in thread['messages']):
            return 'Conversation marked as read.'
        else:
            return 'An error occurred.'
    except Exception:
        return 'Connection error'


def archive_conversation(thread_id, service):
    try:
        # Archive conversation
        thread = service.users().threads().modify(
            userId='me', id=thread_id,
            body={'removeLabelIds': ['INBOX']}).execute()
        if all(u'labelIds' in message and u'INBOX' not in message['labelIds']
               for message in thread['messages']):
            return 'Conversation archived.'
        else:
            return 'An error occurred.'
    except Exception:
        return 'Connection error'


def trash_message(message_id, service):
    try:
        # Trash message
        message = service.users().messages().trash(
            userId='me', id=message_id).execute()
        if u'labelIds' in message and u'TRASH' in message['labelIds']:
            return 'Mail moved to trash.'
        else:
            return 'An error occurred.'
    except Exception:
        return 'Connection error'


def trash_conversation(thread_id, service):
    try:
        # Trash conversation
        thread = service.users().threads().trash(
            userId='me', id=thread_id).execute()
        if all(u'labelIds' in message and u'TRASH' in message['labelIds']
               for message in thread['messages']):
            return 'Conversation moved to trash.'
        else:
            return 'An error occurred.'
    except Exception:
        return 'Connection error'


if __name__ == '__main__':
    wf = Workflow()
    sys.exit(wf.run(execute))
Python
0.000037
@@ -2715,33 +2715,8 @@ _id%0A - wf.logger.debug(url)%0A
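Decoded, the diff removes the single wf.logger.debug(url) call, leaving open_message() as roughly:

def open_message(wf, message_id):
    # Debug logging of the URL is gone; the message is just opened
    url = 'https://mail.google.com/mail/u/0/?ui=2&pli=1#inbox/%s' % message_id
    os.system('open "%s"' % url)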
f20699b6154ce9d5c659a15d29737403234484b7
Fix annotations.
zerver/lib/statistics.py
zerver/lib/statistics.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division

from zerver.models import UserProfile, UserActivity, UserActivityInterval, Message

from django.utils.timezone import utc
from typing import Any

from datetime import datetime, timedelta
from itertools import chain
from six.moves import range
import six

def median(data):
    # type: (List[float]) -> float
    data = sorted(data)

    size = len(data)
    if size % 2 == 1:
        return data[size//2]
    else:
        before = size//2 - 1
        after = size//2
        return (data[before] + data[after]) / 2.0

users_who_sent_query = Message.objects.select_related("sender") \
        .exclude(sending_client__name__contains="mirror") \
        .exclude(sending_client__name__contains="API")

def active_users():
    # type: () -> List[UserProfile]
    # Return a list of active users we want to count towards various
    # statistics.
    return UserProfile.objects.filter(is_bot=False, is_active=True).select_related()

def users_who_sent_between(begin, end):
    # type: (datetime, datetime) -> Set[int]
    sender_objs = users_who_sent_query.filter(pub_date__gt=begin, pub_date__lt=end) \
        .values("sender__id")
    return set(s["sender__id"] for s in sender_objs)

def users_who_sent_ever():
    # type: () -> Set[int]
    return set(s["sender__id"] for s in users_who_sent_query.values("sender__id"))

def active_users_to_measure():
    # type: () -> List[UserProfile]
    senders = users_who_sent_ever()
    return [u for u in active_users() if u.id in senders]

def active_users_who_sent_between(begin, end):
    # type: (datetime, datetime) -> List[UserProfile]
    senders = users_who_sent_between(begin, end)
    return [u for u in active_users() if u.id in senders]

# Return the amount of Zulip usage for this user between the two
# given dates
def seconds_usage_between(user_profile, begin, end):
    # type: (UserProfile, datetime, datetime) -> timedelta
    intervals = UserActivityInterval.objects.filter(user_profile=user_profile,
                                                    end__gte=begin,
                                                    start__lte=end)
    duration = timedelta(0)
    for interval in intervals:
        start = max(begin, interval.start)
        finish = min(end, interval.end)
        duration += finish-start
    return duration

# Return a list of how many seconds each user has been engaging with the app on a given day
def seconds_active_during_day(day):
    # type: (datetime) -> List[float]
    begin_day = day.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=utc)
    end_day = day.replace(hour=23, minute=59, second=59, microsecond=0, tzinfo=utc)
    active_users = active_users_to_measure()

    return [seconds_usage_between(user, begin_day, end_day).total_seconds()
            for user in active_users]

def users_active_nosend_during_day(day):
    # type: (datetime) -> List[UserProfile]
    begin_day = day.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=utc)
    end_day = day.replace(hour=23, minute=59, second=59, microsecond=0, tzinfo=utc)
    active_users = active_users_to_measure()
    today_senders = users_who_sent_between(begin_day, end_day)

    today_users = []
    for user_profile in active_users:
        intervals = UserActivityInterval.objects.filter(user_profile=user_profile,
                                                        end__gte=begin_day,
                                                        start__lte=end_day)
        if len(intervals) != 0:
            today_users.append(user_profile)
    return [u for u in today_users if u.id not in today_senders]

def calculate_stats(data, all_users):
    # type: (List[float], List[UserProfile]) -> Dict[str, Any]
    if len(data) == 0:
        return {"# data points": 0}

    active_user_count = len([x for x in data if x > 1])

    mean_data = sum(data) // active_user_count
    median_data = median([x for x in data if x > 1])

    return {'active users': active_user_count,
            'total users': len(all_users),
            'mean': str(timedelta(seconds=mean_data)),
            'median': str(timedelta(seconds=median_data)),
            '# data points': len(data)}

# Return an info dict {mean: , median} containing the mean/median seconds users were active on a given day
def activity_averages_during_day(day):
    # type: (datetime) -> Dict[str, Any]
    users_to_measure = active_users_to_measure()
    seconds_active = seconds_active_during_day(day)
    return calculate_stats(seconds_active, all_users=users_to_measure)

# Returns an info dict {mean: , median} with engagement numbers for all users according
# to active_users_to_measure.
def activity_averages_between(begin, end, by_day=True):
    # type: (datetime, datetime, bool) -> Dict[str, Any]
    seconds_active = {}
    users_to_measure = active_users_to_measure()
    for i in range((end - begin).days):
        day = begin + timedelta(days=i)

        # Ignore weekends
        if day.weekday() in [5, 6]:
            continue
        seconds_active[day] = seconds_active_during_day(day)

    if by_day:
        return dict((str(day), calculate_stats(values, all_users=users_to_measure))
                    for day, values in six.iteritems(seconds_active))
    else:
        return calculate_stats(list(chain.from_iterable(seconds_active.values())), # type: ignore # chain.from_iterable needs overload
                               all_users=users_to_measure)
Python
0
@@ -233,16 +233,43 @@ port Any +, Dict, List, Sequence, Set %0A%0Afrom d @@ -399,28 +399,32 @@ # type: ( -List +Sequence %5Bfloat%5D) -%3E @@ -852,36 +852,40 @@ # type: () -%3E -List +Sequence %5BUserProfile%5D%0A @@ -3180,16 +3180,42 @@ ers = %5B%5D + # type: List%5BUserProfile%5D %0A for @@ -3674,20 +3674,24 @@ type: ( -List +Sequence %5Bfloat%5D, @@ -3691,20 +3691,24 @@ float%5D, -List +Sequence %5BUserPro
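Decoded, the diff widens the typing import and loosens several comment-style annotations from List to the read-only Sequence, plus annotates a local. Representative hunks, with the unchanged bodies elided:

from typing import Any, Dict, List, Sequence, Set

def median(data):
    # type: (Sequence[float]) -> float
    ...

def calculate_stats(data, all_users):
    # type: (Sequence[float], Sequence[UserProfile]) -> Dict[str, Any]
    ...

# and inside users_active_nosend_during_day():
    today_users = []  # type: List[UserProfile]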
e79e0240165bf2aa77612be2f1227ca7bb3c5fc7
add empty return docs
lib/ansible/modules/extras/system/make.py
lib/ansible/modules/extras/system/make.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2015, Linus Unnebäck <linus@folkdatorn.se>
#
# This file is part of Ansible
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.

# import module snippets
from ansible.module_utils.basic import *

DOCUMENTATION = '''
---
module: make
short_description: Run targets in a Makefile
requirements: []
version_added: "2.1"
author: Linus Unnebäck (@LinusU) <linus@folkdatorn.se>
description: Run targets in a Makefile.
options:
  target:
    description: The target to run
    required: false
  params:
    description: Any extra parameters to pass to make
    required: false
  chdir:
    description: cd into this directory before running make
    required: true
'''

EXAMPLES = '''
# Build the default target
- make: chdir=/home/ubuntu/cool-project

# Run `install` target as root
- make: chdir=/home/ubuntu/cool-project target=install
  become: yes

# Pass in extra arguments to build
- make:
    chdir: /home/ubuntu/cool-project
    target: all
    params:
      NUM_THREADS: 4
      BACKEND: lapack
'''


def format_params(params):
    return [k + '=' + str(v) for k, v in params.iteritems()]


def push_arguments(cmd, args):
    if args['target'] != None:
        cmd.append(args['target'])
    if args['params'] != None:
        cmd.extend(format_params(args['params']))
    return cmd


def check_changed(make_path, module, args):
    cmd = push_arguments([make_path, '--question'], args)
    rc, _, __ = module.run_command(cmd, check_rc=False, cwd=args['chdir'])
    return (rc != 0)


def run_make(make_path, module, args):
    cmd = push_arguments([make_path], args)
    module.run_command(cmd, check_rc=True, cwd=args['chdir'])


def main():
    module = AnsibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            target=dict(required=False, default=None, type='str'),
            params=dict(required=False, default=None, type='dict'),
            chdir=dict(required=True, default=None, type='str'),
        ),
    )
    args = dict(
        changed=False,
        failed=False,
        target=module.params['target'],
        params=module.params['params'],
        chdir=module.params['chdir'],
    )
    make_path = module.get_bin_path('make', True)

    # Check if target is up to date
    args['changed'] = check_changed(make_path, module, args)

    # Check only; don't modify
    if module.check_mode:
        module.exit_json(changed=args['changed'])

    # Target is already up to date
    if args['changed'] == False:
        module.exit_json(**args)

    run_make(make_path, module, args)
    module.exit_json(**args)

if __name__ == '__main__':
    main()
Python
0.000926
@@ -1629,16 +1629,125 @@ ck%0A'''%0A%0A +# TODO: Disabled the RETURN as it was breaking docs building. Someone needs to%0A# fix this%0ARETURN = '''# '''%0A%0A %0Adef for
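Decoded, the diff inserts a placeholder RETURN documentation string right after the EXAMPLES block, so docs building stops choking on the missing variable:

# TODO: Disabled the RETURN as it was breaking docs building. Someone needs to
# fix this
RETURN = '''# '''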
08202533755ed0094518bb31341c4d094e55759e
Create RepositoryConfiguratorCs and run it
projects/epicycle.derkonfigurator-py/epicycle/derkonfigurator/repository/Repository.py
projects/epicycle.derkonfigurator-py/epicycle/derkonfigurator/repository/Repository.py
""" Contains the Repository class @author: Dima Potekhin """ import os from epicycle.derkonfigurator.utils import nget from epicycle.derkonfigurator.WorkspaceEntity import WorkspaceEntity from epicycle.derkonfigurator.externals.ExternalsManager import ExternalsManager from epicycle.derkonfigurator.packaging.NuGetPackager import NuGetPackager from epicycle.derkonfigurator.project.Project import Project class Repository(WorkspaceEntity): DEFAULT_VERSION = "0.0.0.0" CONFIG_FILE_NAME = "repository_config.yaml" EXTERNALS_DIR = "externals" PROJECTS_DIR = "projects" def __init__(self, workspace, path): super(Repository, self).__init__(path, workspace.environment, workspace, workspace.reporter) self._config = self.directory.read_yaml(Repository.CONFIG_FILE_NAME) self._name = os.path.split(path)[1] version_data = self.directory.read_unicode_file("version") self._version = version_data.strip() if version_data else Repository.DEFAULT_VERSION self._organization = nget(self._config, "organization", default="") self._product = nget(self._config, "product", default=self.name) self._copyright = nget(self._config, "copyright", default="") self._title = nget(self._config, "title", default="") self._license_url = nget(self._config, "license_url", default="") self._url = nget(self._config, "url", default="") self._description = nget(self._config, "description", default="") self._summary = nget(self._config, "summary", default="") self._release_notes = nget(self._config, "release_notes", default="") self._tags = nget(self._config, "tags", default="") self._source_infocomment = self.directory.read_unicode_file("comment") self._externals = ExternalsManager(self, Repository.EXTERNALS_DIR) self._projects = [] self._nuget_packager = NuGetPackager(self) @property def config(self): return self._config @property def name(self): return self._name @property def version(self): return self._version @property def organization(self): return self._organization @property def product(self): return self._product @property def copyright(self): return self._copyright @property def title(self): return self._title @property def license_url(self): return self._license_url @property def url(self): return self._url @property def description(self): return self._description @property def summary(self): return self._summary @property def release_notes(self): return self._release_notes @property def tags(self): return self._tags @property def source_infocomment(self): return self._source_infocomment @property def externals(self): return self._externals @property def projects(self): return self._projects def get_project(self, full_name): for project in self.projects: if project.full_name.lower() == full_name.lower(): return project return None def configure(self): self.report("Configuring the repository %s" % self.name) with self.report_sub_level(): self._load_externals() self._load_projects() self._resolve_project_references() self._flatten_dependencies() self._configure_projects() self._configure_packagers() def _load_externals(self): self._externals.load() def _load_projects(self): to_repository_relative_path = "../.." 
self.report("Loading projects") with self.report_sub_level(): for directory in self.directory.subdir(Repository.PROJECTS_DIR).list_subdirs_with_file(Project.CONFIG_FILE_NAME): self._projects.append(Project(self, directory.path, to_repository_relative_path)) self.report("Loaded %d projects" % len(self._projects)) def _resolve_project_references(self): self.report("Resolving project references") for project in self._projects: project.resolve_dependencies() def _flatten_dependencies(self): self.report("Flattening dependencies") for project in self._projects: project.flatten_dependencies() def _configure_projects(self): if not self._projects: self.report("No projects to configure!") return self.report("Configuring projects") with self.report_sub_level(): for project in self._projects: project.configure() def _configure_packagers(self): self.report("Configuring packagers") with self.report_sub_level(): self._nuget_packager.configure()
Python
0
@@ -401,16 +401,78 @@ Project%0A +from RepositoryConfiguratorCs import RepositoryConfiguratorCs%0A %0A%0Aclass @@ -1970,109 +1970,375 @@ lf._ -nuget_packager = NuGetPackager(self)%0A%0A @property%0A def config(self):%0A return self._config +configurator = self._create_configurator()%0A self._nuget_packager = NuGetPackager(self)%0A%0A def _create_configurator(self):%0A # Currently assuming a .NET configurator%0A return RepositoryConfiguratorCs(self)%0A%0A @property%0A def config(self):%0A return self._config%0A%0A @property%0A def configurator(self):%0A return self._configurator %0A%0A @@ -3895,32 +3895,74 @@ gure_projects()%0A + self.configurator.configure()%0A self
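Decoded, the diff wires a new configurator into the class: an import, a field initialised before the NuGet packager, a factory method, a property, and a call at the end of configure(). The hunks read roughly as follows (scopes are marked in comments, indentation inferred):

# (module level)
from RepositoryConfiguratorCs import RepositoryConfiguratorCs

# (inside __init__)
        self._configurator = self._create_configurator()
        self._nuget_packager = NuGetPackager(self)

# (new members on Repository)
    def _create_configurator(self):
        # Currently assuming a .NET configurator
        return RepositoryConfiguratorCs(self)

    @property
    def configurator(self):
        return self._configurator

# (inside configure(), right after self._configure_projects())
            self.configurator.configure()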
f81536c5e17180715b232ea0befedb16d0555873
use make -j{{JOB_CPUS}}
samples/build-android.py
samples/build-android.py
#!/usr/bin/env python
import time, itertools
from sci import Job

job = Job(__name__, debug = True)

# Parameters - to allow a GUI to easily list them
branch = job.parameter("BRANCH", "Manifest branch", required = True)
build_id_prefix = job.parameter("BUILD_ID_PREFIX", "The build ID prefix to use")
manifest_file = job.parameter("MANIFEST_FILE", "Manifest Filename",
                              default = "default.xml")
products = job.parameter("PRODUCTS", "Products to build", type = "array")
variants = job.parameter("VARIANTS", "Variants to build", type = "array",
                         default = ["eng", "userdebug", "user"])

# This job works as follows           <-- run single matrix job *3 -->
#                      / get source -> build android -> zip \
# create  -> create -> - get source -> build android -> zip - create
# build id   manifest  \ get source -> build android -> zip / report
#                      <<--------- run matrix jobs ---------->>


@job.default(products)
def get_products():
    """A function that will be evaluated to get the default value for
       'products' in case it's not specified"""
    if "donut" in branch():
        return ["g1", "emulator"]
    if "eclair" in branch():
        return ["droid", "nexus_one", "emulator"]
    if "gingerbread" in branch():
        return ["nexus_one", "nexus_s", "emulator"]
    job.error("Don't know which products to build!")


@job.default(build_id_prefix)
def default_build_id_prefix():
    return branch().upper().replace("-", "_")


@job.step("Create Build ID")
def create_build_id():
    """A very simple step"""
    build_id = build_id_prefix() + "_" + time.strftime("%y%m%d_%H%M%S")
    return build_id


@job.step("Create Static Manifest")
def create_manifest():
    """These commands will automatically run in a temporary directory
       that will be wiped once the entire job finishes"""
    job.run("repo init -u {{MANIFEST_URL}} -b {{BRANCH}} -m {{MANIFEST_FILE}}")
    job.run("repo sync --jobs={{SYNC_JOBS}}", name = "sync")
    job.run("repo manifest -r -o static_manifest.xml")
    # Upload the result of this step to the 'file storage node'
    job.artifacts.add("static_manifest.xml")


@job.step("Get source code")
def get_source():
    job.run("repo init -u {{MANIFEST_URL}} -b {{BRANCH}}")
    job.run("cp static_manifest.xml .repo/manifest.xml")
    job.run("repo sync --jobs={{SYNC_JOBS}}")


@job.step("Build Android")
def build_android():
    job.run("""
        . build/envsetup.sh
        lunch {{PRODUCT}}-{{VARIANT}}
        make -j{{JOBS}}""",
        JOBS = job.var("JOB_CPUS") + 1)


@job.step("ZIP resulted files")
def zip_result():
    zip_file = "result-{{BUILD_ID}}-{{PRODUCT}}-{{VARIANT}}.zip"
    input_files = "out/target/product/{{PRODUCT}}/*.img"
    job.artifacts.create_zip(zip_file, input_files)
    return job.format(zip_file)


@job.step("Run single matrix job")
def run_single_matrix_job(product, variant):
    """This job will be running on a separate machine, in parallel
       with a lot of other similar jobs. It will perform a few build steps."""
    job.env["PRODUCT"] = product
    job.env["VARIANT"] = variant
    job.artifacts.get("static_manifest.xml")
    get_source()
    build_android()
    return zip_result()


@job.step("Run matrix jobs")
def run_matrix_jobs():
    """The async function runs the step asynchronously on (possibly)
       another node. This step (run_matrix_jobs) will wait for all the
       detached jobs to finish before it returns, but in this case we call
       ajob.get() which blocks until that job completes, and returns the
       return value from that step - in this: 'run_single_matrix_job'"""
    comb = itertools.product(products(), variants())
    async_jobs = []
    for product, variant in comb:
        ajob = run_single_matrix_job.async(product, variant)
        async_jobs.append(ajob)
    for ajob in async_jobs:
        print("Result: " + ajob.get())


@job.step("Send Report")
def send_report():
    pass


@job.main()
def main():
    """This is the job's entry point."""
    job.env["BUILD_ID"] = create_build_id()
    job.description = "{{BUILD_ID}}"
    create_manifest()
    run_matrix_jobs()
    send_report()

if __name__ == "__main__":
    job.start()
Python
0.000002
@@ -2583,58 +2583,18 @@ %7BJOB +_CPU S%7D%7D%22%22%22 -,%0A JOBS = job.var(%22JOB_CPUS%22) + 1 )%0A%0A%0A
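Decoded, the diff drops the computed JOBS keyword argument and lets the job template expand {{JOB_CPUS}} directly, so build_android() becomes roughly:

@job.step("Build Android")
def build_android():
    # {{JOB_CPUS}} is now substituted by the job framework itself
    job.run("""
        . build/envsetup.sh
        lunch {{PRODUCT}}-{{VARIANT}}
        make -j{{JOB_CPUS}}""")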
1cc1df8c00a7a956b0a1207f99928f731714541a
add main path reminder in TaskLog
TaskList/TaskLog/TaskLog.py
TaskList/TaskLog/TaskLog.py
#!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage task running log'''
import xml.etree.ElementTree as xmlMod
from TaskList.TaskLog.GroupLog import *
from Preferences.PresetList.Preset.Preset import *
from Preferences.PresetList.Preset.Metapreset import *

class TaskLog:
	'''class to manage task running log'''
	
	def __init__(self, xml = None, pref = None, task = None):
		'''initialize task log object'''
		if xml is None:
			self.defaultInit(pref, task)
		else:
			self.fromXml(xml)
	
	def defaultInit(self, preferences, task):
		'''initialize Task log object by generating from the task settings'''
		self.presetName = task.preset
		if self.presetName == '[default]':
			self.presetName = preferences.presets.default
		self.preset = preferences.presets.getPreset(self.presetName).copy()
		
		if type(self.preset) is Preset:
			self.groups = [GroupLog(groupName = '[main]', preferences = preferences, task = task)]
		else:
			self.groups = []
			for g in self.preset.groups.keys():
				group = preferences.presets.renderlayers.groups[g]
				if group.isUsefull(task.info.scenes[task.scene]):
					self.groups.append(GroupLog(groupName = g, preferences = preferences, task = task))
			
			default = GroupLog(groupName = '[default]', preferences = preferences, task = task)
			if len(default.renderlayers) > 0:
				self.groups.append(default)
	
	def fromXml(self, xml):
		'''initialize Task log object with saved log'''
		node = xml.find('preset')
		if node is None:
			node = xml.find('metapreset')
			self.presetName = node.get('alias')
			self.preset = Metapreset(xml = node)
		else:
			self.presetName = node.get('alias')
			self.preset = Preset(xml = node)
		
		self.groups = []
		for node in xml.findall('group'):
			self.groups.append(GroupLog(xml = node))
	
	def toXml(self):
		'''export task log into xml syntaxed string'''
		xml = '<log>\n'
		xml += self.preset.toXml(self.presetName)
		
		for g in self.groups:
			xml += g.toXml()
		
		xml += '</log>'
		return xml
	
	def print(self):
		'''A method to print task log'''
	
	def getGroup(self, g):
		'''a method to get a group by his name'''
		for group in self.groups:
			if g == group.name:
				return group
Python
0
@@ -811,16 +811,196 @@ py()%0A%09%09%0A +%09%09fileName = task.path.split('/').pop()%0A%09%09fileName = fileName%5B0:fileName.rfind('.blend')%5D%0A%09%09self.path = preferences.output.getMainPath(fileName, task.scene, self.presetName)%0A%09%09%0A%09%09%0A %09%09if typ
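Decoded (%09 is a tab), the diff inserts the main output path computation right after the preset is copied in defaultInit():

		# derive the base file name from the task's .blend path, then
		# remember the main output path for this scene and preset
		fileName = task.path.split('/').pop()
		fileName = fileName[0:fileName.rfind('.blend')]
		self.path = preferences.output.getMainPath(fileName, task.scene, self.presetName)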
3d571b1c5602b405b264eb4a9dc316640fd9605a
Remove intermediate images while building the image
cekit/builders/docker_builder.py
cekit/builders/docker_builder.py
import docker
import logging
import os
import re
import yaml

from cekit.builder import Builder
from cekit.errors import CekitError

from docker_squash.squash import Squash

try:
    docker_client = docker.Client(version="1.22")
except AttributeError:
    docker_client = docker.APIClient(version="1.22")

logger = logging.getLogger('cekit')

ANSI_ESCAPE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')


class DockerBuilder(Builder):
    """This class wraps the docker build command to build an image"""

    def __init__(self, build_engine, target, params=None):
        if not params:
            params = {}
        self._tags = params.get('tags', [])
        self._pull = params.get('pull', False)
        self._base = params.get('base')
        super(DockerBuilder, self).__init__(build_engine, target, params)

    def check_prerequisities(self):
        try:
            docker_client.images
        except Exception as ex:
            raise CekitError("Docker build engine needs docker with python bindings installed "
                             " and configured, error: %s" % ex)

    def build(self, build_args=None):
        """After the source files are generated, the container image can be built.
        We're using Docker to build the image currently.
        """
        args = {}
        args['path'] = os.path.join(self.target, 'image')
        args['tag'] = self._tags[0]
        args['pull'] = self._pull

        # Custom tags for the container image
        logger.debug("Building image with tags: '%s'" % "', '".join(self._tags))

        logger.info("Building container image...")

        try:
            docker_layer_ids = []
            out = docker_client.build(**args)

            build_log = [""]

            for line in out:
                if b'stream' in line:
                    line = yaml.safe_load(line)['stream']
                elif b'status' in line:
                    line = yaml.safe_load(line)['status']
                elif b'errorDetail' in line:
                    line = yaml.safe_load(line)['errorDetail']['message']
                    raise CekitError("Image build failed: '%s'" % line)

                if line != build_log[-1]:
                    # this prevents polluting the cekit log with downloading/extracting msgs
                    log_msg = ANSI_ESCAPE.sub('', line).strip()
                    for msg in log_msg.split('\n'):
                        logger.info('Docker: %s' % msg)
                build_log.append(line)

                if '---> Running in ' in line:
                    docker_layer_ids.append(line.split(' ')[-1].strip())
                elif 'Successfully built ' in line:
                    docker_layer_ids.append(line.split(' ')[-1].strip())
                elif '---> Using cache' in build_log[-2]:
                    docker_layer_ids.append(line.split(' ')[-1].strip())

            self.squash_image(docker_layer_ids[-1])

            for tag in self._tags[1:]:
                if ':' in tag:
                    img_repo, img_tag = tag.split(":")
                    docker_client.tag(self._tags[0], img_repo, tag=img_tag)
                else:
                    docker_client.tag(self._tags[0], tag)
            logger.info("Image built and available under following tags: %s"
                        % ", ".join(self._tags))
        except Exception as ex:
            msg = "Image build failed, see logs above."
            if len(docker_layer_ids) >= 2:
                logger.error("You can look inside the failed image by running "
                             "'docker run --rm -ti %s bash'" % docker_layer_ids[-2])
            if "To enable Red Hat Subscription Management repositories:" in ' '.join(build_log) and \
                    not os.path.exists(os.path.join(self.target, 'image', 'repos')):
                msg = "Image build failed with a yum error and you don't " \
                      "have any yum repository configured, please check " \
                      "your image/module descriptor for proper repository " \
                      " definitions."
            raise CekitError(msg, ex)

    def squash_image(self, layer_id):
        logger.info("Squashing image %s..." % (layer_id))
        # XXX: currently, cleanup throws a 409 error from the docker daemon.
        # this needs to be investigated in docker_squash
        squash = Squash(docker=docker_client, log=logger, from_layer=self._base,
                        image=layer_id, tag=self._tags[0], cleanup=False)
        squash.run()
Python
0.000056
@@ -1410,16 +1410,42 @@ lf._pull +%0A args%5B'rm'%5D = True %0A%0A
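Decoded, the diff adds a single build argument so the Docker daemon removes intermediate containers once the build succeeds; the args dict in build() becomes roughly:

        args = {}
        args['path'] = os.path.join(self.target, 'image')
        args['tag'] = self._tags[0]
        args['pull'] = self._pull
        args['rm'] = True  # remove intermediate containers after a successful build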