Column schema (one row per commit):

  column        dtype           min    max
  ------        -----           ---    ---
  commit        stringlengths   40     40
  subject       stringlengths   1      3.25k
  old_file      stringlengths   4      311
  new_file      stringlengths   4      311
  old_contents  stringlengths   0      26.3k
  lang          stringclasses   3 values
  proba         float64         0      1
  diff          stringlengths   0      7.82k

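A minimal sketch of loading and iterating records with this schema. It assumes the dump corresponds to a Hugging Face dataset; the dataset id "user/commit-diffs" and the split name are placeholders, not the real identifiers.

from datasets import load_dataset

# Placeholder dataset id; substitute the actual repository name.
ds = load_dataset("user/commit-diffs", split="train")

for record in ds:
    # Each record pairs a commit subject with the pre-change file contents
    # ("old_contents") and a patch ("diff") that produces the new contents.
    print(record["commit"][:8], record["lang"], record["subject"])
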
d11478f1ad2d6caf16aeff087f2399297eec83d2
Improve qrcode generation, add proper error message to generate_uri assertion about secret length
src/keybar/utils/totp.py
src/keybar/utils/totp.py
import io
import urllib
import time

from base64 import b32encode

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.twofactor.totp import TOTP
from cryptography.hazmat.primitives.hashes import SHA1

from django.http import HttpResponse
from django.utils.encoding import force_bytes

from qrcode import QRCode


BASE_URI = 'otpauth://{key_type}/{issuer}:{user}?secret={secret}&issuer={issuer}'


def generate_qr_code_response(request):
    user = request.user

    qrcode = QRCode()

    uri = generate_uri('totp', bytes(user.totp_secret), user.email, 'keybar')

    print(uri)

    qrcode.add_data(uri)
    qrcode.make(fit=True)

    img = qrcode.make_image()

    stream = io.BytesIO()
    img.save(stream)

    return HttpResponse(stream.getvalue(), content_type='image/png')


def generate_uri(key_type, secret, user, issuer):
    """Generate a URI suitable for Google Authenticator.

    See: https://code.google.com/p/google-authenticator/wiki/KeyUriFormat
    """
    # Google Authenticator breaks if the b32 encoded string contains a padding
    # thus force the key to be divisible by 5 octets so that we don't have any
    # padding markers.
    assert len(secret) % 5 == 0

    return BASE_URI.format(**{
        'key_type': urllib.parse.quote(key_type),
        'issuer': urllib.parse.quote(issuer),
        'user': urllib.parse.quote(user),
        'secret': urllib.parse.quote(b32encode(secret)),
    })


def verify_totp_code(user, code):
    totp = TOTP(bytes(user.totp_secret), 6, SHA1(), 30, backend=default_backend())
    return totp.verify(force_bytes(code), time.time())
Python
0.000001
@@ -343,16 +343,61 @@ QRCode%0A +from qrcode.constants import ERROR_CORRECT_H%0A %0A%0ABASE_U @@ -559,16 +559,100 @@ QRCode( +%0A error_correction=ERROR_CORRECT_H,%0A box_size=6,%0A border=4%0A )%0A%0A u @@ -729,24 +729,8 @@ ')%0A%0A - print(uri)%0A%0A @@ -1328,16 +1328,45 @@ %25 5 == 0 +, 'secret not divisible by 5' %0A%0A re
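The "diff" fields in these records appear to use Google's diff-match-patch patch-text format: "@@ -start,len +start,len @@" headers with character offsets, and payload lines that are URI-escaped (%0A for newlines, %22 for quotes, %25 for percent signs). Assuming that format, the following is a sketch of recovering the post-commit file from a record; `record` stands for any row of this dataset.

from diff_match_patch import diff_match_patch

dmp = diff_match_patch()

# Parse the escaped patch text back into patch objects.
patches = dmp.patch_fromText(record["diff"])

# Apply the hunks to the pre-change contents; `results` flags whether
# each hunk applied cleanly.
new_contents, results = dmp.patch_apply(patches, record["old_contents"])
assert all(results), "every hunk should apply cleanly"
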
7bbd2827671d54662a0b13e11a9475a0ea5dee67
Update apps/lecturers/models.py using f-string
apps/lecturers/models.py
apps/lecturers/models.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

import os
import re
from datetime import datetime

from django.conf import settings
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models

from apps.lecturers import managers


def lecturer_directory_path(instance, filename):
    # file will be uploaded to MEDIA_ROOT/lecturers/<filename>
    return f"lecturers/{filename}"


class Lecturer(models.Model):
    """A lecturer at HSR.

    If there is a photo of that lecturer, it should go into the media folder
    and the filename should be <abbreviation>.jpg.

    """

    id = models.AutoField("HSR ID", primary_key=True)
    title = models.CharField("Titel", max_length=32, null=True, blank=True)
    last_name = models.CharField("Name", max_length=255)
    first_name = models.CharField("Vorname", max_length=255)
    abbreviation = models.CharField("Kürzel", max_length=10, unique=True)
    department = models.CharField("Abteilung", max_length=100, null=True, blank=True)
    function = models.CharField("Funktion", max_length=255, null=True, blank=True)
    main_area = models.CharField(
        "Fachschwerpunkt", max_length=255, null=True, blank=True
    )
    subjects = models.CharField(
        max_length=50, null=True, blank=True
    )  # todo add to frontend
    email = models.EmailField(null=True, blank=True)
    office = models.CharField(max_length=20, null=True, blank=True)
    picture = models.ImageField(
        "Bild",
        upload_to=lecturer_directory_path,
        null=True,
        blank=True,
    )

    objects = models.Manager()
    real_objects = managers.RealLecturerManager()

    def name(self):
        parts = [self.title, self.last_name, self.first_name]
        return " ".join(p for p in parts if p)

    def photo(self):
        """Try to see if a photo with the name <self.id>.jpg exists. If it
        does, return the corresponding URL. If it doesn't, return None."""
        if self.picture:
            path = self.picture.name
        else:
            path = os.path.join("lecturers", "%s.jpg" % self.id)
        full_path = os.path.join(settings.MEDIA_ROOT, path)
        return path if os.path.exists(full_path) else None

    def oldphotos(self):
        """Try to see whether there are more pictures in the folder
        ``lecturers/old/<self.id>/``..."""
        path = os.path.join("lecturers", "old", str(self.id))
        fullpath = os.path.join(settings.MEDIA_ROOT, path)
        oldphotos = []
        if os.path.exists(fullpath):
            for filename in os.listdir(fullpath):
                if re.match(r"^[0-9]+\.jpg$", filename):
                    filepath = os.path.join(path, filename)
                    oldphotos.append(filepath)
        return oldphotos

    # TODO rename to _rating_avg
    def _avg_rating(self, category):
        """Calculate the average rating for the given category."""
        qs = self.LecturerRating.filter(category=category)
        if qs.exists():
            ratings = qs.values_list("rating", flat=True)
            return int(sum(ratings) / len(ratings) + 0.5)  # always round .5 up
        return 0

    def _rating_count(self, category):
        return self.LecturerRating.filter(category=category).count()

    def avg_rating_d(self):
        return self._avg_rating("d")

    def avg_rating_m(self):
        return self._avg_rating("m")

    def avg_rating_f(self):
        return self._avg_rating("f")

    def rating_count_d(self):
        return self._rating_count("d")

    def rating_count_m(self):
        return self._rating_count("m")

    def rating_count_f(self):
        return self._rating_count("f")

    def __str__(self):
        return "{} {}".format(self.last_name, self.first_name)

    class Meta:
        ordering = ["last_name"]


class LecturerRating(models.Model):
    """A lecturer rating.

    Max 1 per user, category and lecturer."""

    CATEGORY_CHOICES = (("d", "Didaktisch"), ("m", "Menschlich"), ("f", "Fachlich"))
    RATING_VALIDATORS = [MaxValueValidator(10), MinValueValidator(1)]

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        related_name="LecturerRating",
        null=True,
        on_delete=models.SET_NULL,
    )
    lecturer = models.ForeignKey(
        Lecturer, related_name="LecturerRating", on_delete=models.CASCADE
    )
    category = models.CharField(max_length=1, choices=CATEGORY_CHOICES, db_index=True)
    rating = models.PositiveSmallIntegerField(
        validators=RATING_VALIDATORS, db_index=True
    )

    def __str__(self):
        return "%s %s%u" % (self.lecturer, self.category, self.rating)

    class Meta:
        unique_together = ("user", "lecturer", "category")


class Quote(models.Model):
    """Lecturer quotes."""

    author = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        related_name="Quote",
        null=True,
        on_delete=models.SET_NULL,
    )
    lecturer = models.ForeignKey(
        Lecturer, verbose_name="Dozent", related_name="Quote", on_delete=models.CASCADE
    )
    date = models.DateTimeField(auto_now_add=True)
    quote = models.TextField("Zitat")
    comment = models.TextField("Bemerkung", default="", blank=True)

    def date_available(self):
        return self.date != datetime(1970, 1, 1)

    def vote_sum(self):
        """Add up and return all votes for this quote."""
        up = self.QuoteVote.filter(vote=True).count()
        down = self.QuoteVote.filter(vote=False).count()
        return up - down

    def __str__(self):
        return "[{}] {}...".format(self.lecturer, self.quote[:30])

    class Meta:
        ordering = ["-date"]
        get_latest_by = "date"


class QuoteVote(models.Model):
    """Lecturer quote votes."""

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        related_name="QuoteVote",
        null=True,
        on_delete=models.SET_NULL,
    )
    quote = models.ForeignKey(Quote, related_name="QuoteVote", on_delete=models.CASCADE)
    vote = models.BooleanField(help_text="True = upvote, False = downvote")

    def __str__(self):
        fmt_args = self.user.username, "up" if self.vote else "down", self.quote.pk
        return "User %s votes %s quote %s" % fmt_args

    class Meta:
        unique_together = ("user", "quote")


class Course(models.Model):
    """A possible degree course. At the moment only one lecturer is possible."""

    id = models.IntegerField("Studiengang ID", primary_key=True)
    abbreviation = models.CharField("Abkürzung", max_length=10, unique=True)
    name = models.CharField("Titel", max_length=50)

    def __str__(self):
        return "{} ({})".format(self.name, self.abbreviation)
Python
0
@@ -2140,26 +2140,24 @@ s%22, -%22%25s.jpg%22 %25 +f%22%7B self.id +%7D.jpg%22 )%0A
fb7e5039033530f1e6064ea54fe952f0781f4da2
Bump version number for release.
coredata.py
coredata.py
#!/usr/bin/python3 -tt

# Copyright 2012 Jussi Pakkanen

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This file contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.

import pickle

version = '0.2.0-research'

class CoreData():

    def __init__(self, options):
        self.version = version
        self.prefix = options.prefix
        self.libdir = options.libdir
        self.bindir = options.bindir
        self.includedir = options.includedir
        self.datadir = options.datadir
        self.mandir = options.mandir
        self.backend = options.backend
        self.buildtype = options.buildtype
        self.strip = options.strip
        self.coverage = options.coverage
        self.compilers = {}
        self.deps = {}
        self.ext_progs = {}
        self.ext_libs = {}

def load(filename):
    obj = pickle.load(open(filename, 'rb'))
    if not isinstance(obj, CoreData):
        raise RuntimeError('Core data file is corrupted.')
    if obj.version != version:
        raise RuntimeError('Build tree has been generated with Meson version %s, which is incompatible with current version %s.'%
                           (obj.version, version))
    return obj

def save(obj, filename):
    if obj.version != version:
        raise RuntimeError('Fatal version mismatch corruption.')
    pickle.dump(obj, open(filename, 'wb'))

forbidden_target_names = {'clean': None,
                          'clean-gcno': None,
                          'clean-gcda': None,
                          'coverage-text': None,
                          'coverage-xml': None,
                          'coverage-html': None,
                          'phony': None,
                          'all': None,
                          'test': None,
                          'test-valgrind': None,
                          'install': None,
                          'build.ninja': None,
                          }
Python
0
@@ -761,17 +761,8 @@ .2.0 --research '%0A%0Ac
37e19505c8f8847e5804c7fb2d95cb678b15fafe
Replace \r\n with \n in run scripts
code_court/courthouse/views/admin/languages.py
code_court/courthouse/views/admin/languages.py
import json
import re
import util

from sqlalchemy.exc import IntegrityError

from flask_login import login_required

from flask import (
    abort,
    Blueprint,
    current_app,
    flash,
    redirect,
    render_template,
    request,
    url_for,
    flash,
)

languages = Blueprint('languages', __name__,
                      template_folder='templates/language')

class ModelMissingException(Exception):
    pass

@languages.route("/", methods=["GET"], defaults={'page': 1})
@languages.route("/<int:page>", methods=["GET"])
@util.login_required("operator")
def languages_view(page):
    """
    The language view page

    Returns:
        a rendered language view template
    """
    model = util.get_model()

    languages = model.Language.query.paginate(page, 30)

    return render_template("language/view.html", languages=languages)

@languages.route("/add/", methods=["GET", "POST"], defaults={'lang_id': None})
@languages.route("/edit/<int:lang_id>/", methods=["GET"])
@util.login_required("operator")
def languages_add(lang_id):
    """
    Displays the language adding and updating page and accepts
    form submits from those pages.

    Params:
        lang_id (int): the lang to edit, if this is None a new lang will be made

    Returns:
        a rendered add/edit template or a redirect to the language view page
    """
    model = util.get_model()
    if request.method == "GET":  # display add form
        return display_lang_add_form(lang_id)
    elif request.method == "POST":  # process added/edited lang
        return add_lang()
    else:
        current_app.logger.info("invalid lang add request method: %s", request.method)
        abort(400)

@languages.route("/del/<lang_id>/", methods=["GET"])
@util.login_required("operator")
def languages_del(lang_id):
    """
    Deletes a language

    Params:
        lang_id (int): the language to delete

    Returns:
        a redirect to the language view page
    """
    model = util.get_model()

    lang = model.Language.query.filter_by(id=lang_id).scalar()
    if lang is None:
        current_app.logger.info("Can't delete lang \'%s\' as it doesn't exist", lang.name)
        flash("Could not delete language \'{}\' as it does not exist.".format(lang.name), "danger")
        return redirect(url_for("languages.languages_view"))

    try:
        model.db.session.delete(lang)
        model.db.session.commit()
        flash("Deleted language \'{}\'".format(lang.name), "warning")
    except IntegrityError:
        model.db.session.rollback()
        current_app.logger.info("IntegrityError: Could not delete language \'{}\'.".format(lang.name))
        flash("IntegrityError: Could not delete language \'{}\' as it is referenced in another element in the database.".format(lang.name), "danger")

    return redirect(url_for("languages.languages_view"))

def add_lang():
    """
    Adds or edits a language

    Note:
        must be called from within a request context

    Returns:
        a redirect to the language view page
    """
    model = util.get_model()

    name = request.form.get("name")
    syntax_mode = request.form.get("syntax_mode")
    is_enabled = request.form.get("is_enabled")
    run_script = request.form.get("run_script")

    if name is None:
        # TODO: give better feedback for failure
        current_app.logger.info("Undefined name when trying to add language")
        abort(400)

    if syntax_mode is None:
        # TODO: give better feedback for failure
        current_app.logger.info("Undefined syntax_mode when trying to add language")
        abort(400)

    # convert is_enabled to a bool
    is_enabled_bool = util.checkbox_result_to_bool(is_enabled)
    if is_enabled_bool is None:
        # TODO: give better feedback for failure
        current_app.logger.info("Invalid language is_enabled: %s", is_enabled)
        abort(400)

    lang_id = request.form.get('lang_id')
    if lang_id:  # edit
        lang = model.Language.query.filter_by(id=lang_id).one()
        lang.name = name
        lang.syntax_mode = syntax_mode
        lang.is_enabled = is_enabled_bool
        lang.run_script = run_script
    else:  # add
        # check if is duplicate
        if is_dup_lang_name(name):
            # TODO: give better feedback for failure
            current_app.logger.info("Tried to add a duplicate language: %s", name)
            abort(400)

        lang = model.Language(name, syntax_mode, is_enabled_bool, run_script)
        model.db.session.add(lang)

    model.db.session.commit()

    return redirect(url_for("languages.languages_view"))

def display_lang_add_form(lang_id):
    """
    Displays the language add template

    Params:
        lang_id (int): lang_id

    Returns:
        a rendered language add/edit template
    """
    model = util.get_model()

    if lang_id is None:  # add
        return render_template("language/add_edit.html", action_label="Add")
    else:  # edit
        lang = model.Language.query.filter_by(id=lang_id).all()
        if len(lang) == 0:
            # TODO: give better feedback for failure
            current_app.logger.info("Tried to edit non-existant lang, id:%s", lang_id)
            abort(400)

        return render_template("language/add_edit.html",
                               action_label="Edit",
                               lang_id=lang_id,
                               name=lang[0].name,
                               syntax_mode=lang[0].syntax_mode,
                               is_enabled=lang[0].is_enabled,
                               run_script=lang[0].run_script)

## Util functions
def is_dup_lang_name(name):
    """
    Checks if a name is a duplicate of another lang

    Params:
        name (str): the lang name to test

    Returns:
        bool: True if the name is a duplicate, False otherwise
    """
    model = util.get_model()

    dup_lang = model.Language.query.filter_by(name=name).scalar()

    if dup_lang:
        return True
    else:
        return False
Python
0.000294
@@ -3225,16 +3225,38 @@ script%22) +.replace('%5Cr%5Cn', '%5Cn') %0A%0A if
ffd19f324ee532c797dcd5bdbff21f8f3195d4cf
Add --skip_removals flag to roll_preload_list.py
scripts/roll_preload_list.py
scripts/roll_preload_list.py
import argparse
import json
import re
import requests
import sys

def log(s):
  sys.stderr.write(s)

class Chunk:
  BlankLine, CommentLine, OneLineEntry, Unknown = range(4)

def getPendingRemovals():
  log("Fetching pending removal...\n")
  return requests.get("https://hstspreload.org/api/v2/pending-removal").json()

def getRawText(preloadListPath):
  log("Fetching preload list from Chromium source...\n")
  with open(preloadListPath, "r") as f:
    s = f.read()
  return s

def getPendingScan(pendingDataFilePath):
  log("Fetching pending list from provided path...\n")
  log(" %s\n" % pendingDataFilePath)
  with open(pendingDataFilePath, "r") as f:
    return json.load(f)

def domainsToPreload(pendingData, domainsToReject):
  numSkipping = 0
  numPreloading = 0
  for result in pendingData:
    if len(result["issues"]["errors"]) == 0:
      numPreloading += 1
      yield result["domain"]
    else:
      errors = list(error["code"] for error in result["issues"]["errors"])
      domainsToReject += [ {"domain": result["domain"], "errors": errors} ]
      numSkipping += 1
  log("Pending entries preloaded: %d\n" % numPreloading)
  log("Pending entries rejected: %d\n" % numSkipping)

def chunks(rawText):
  log("Chunking...\n")
  lines = iter(rawText.splitlines())
  while True:
    try:
      chunk = next(lines)
      if chunk == "":
        yield chunk, Chunk.BlankLine
        continue
      elif re.match(r'^ *//.*', chunk):
        yield chunk, Chunk.CommentLine
        continue
      elif re.match(r'^ \{.*\},', chunk):
        yield chunk, Chunk.OneLineEntry
      else:
        yield chunk, Chunk.Unknown
    except StopIteration:
      break

def update(pendingRemovals, pendingAdditions, entryStrings):
  log("Removing and adding entries...\n")
  removedCount = 0
  for l, c in entryStrings:
    if c == Chunk.OneLineEntry:
      parsed = json.loads("[%s{}]" % l)[0]
      domain = parsed["name"]
      if domain in pendingRemovals:
        removedCount += 1
        pendingRemovals.remove(domain)
      else:
        yield l
    elif l == " // END OF 1-YEAR BULK HSTS ENTRIES":
      for domain in sorted(pendingAdditions):
        yield ' { "name": "%s", "policy": "bulk-1-year", "mode": "force-https", "include_subdomains": true },' % domain
      yield l
    else:
      yield l
  log("Removed: %s\n" % removedCount)

def write(file, output):
  log("Writing to %s...\n" % file)
  with open(file, 'w') as file:
    file.write(output)
    file.close()

def getArgs():
  parser = argparse.ArgumentParser(description='Roll the HSTS preload list (experimental).')
  parser.add_argument('preload_list_path', type=str)
  parser.add_argument('pending_scan_path', type=str)
  parser.add_argument('rejected_domains_path', type=str)
  return parser.parse_args()

def parseJsonWithComments(rawText):
  s = ""
  for l, c in chunks(rawText):
    if c == Chunk.CommentLine:
      continue
    else:
      s += l + "\n"
  return json.loads(s)

def checkForDupes(parsedList):
  log("Checking for duplicates...\n")
  seen = set()
  dupes = set()
  for entry in parsedList["entries"]:
    name = entry["name"]
    if name in seen:
      dupes.add(name)
    else:
      seen.add(name)
  return dupes

def main():
  args = getArgs()
  rawText = getRawText(args.preload_list_path)
  pendingRemovals = getPendingRemovals()
  domainsToReject = []
  pendingAdditions = domainsToPreload(getPendingScan(args.pending_scan_path), domainsToReject)
  updated = update(pendingRemovals, pendingAdditions, chunks(rawText))
  updatedText = "\n".join(updated) + "\n"
  dupes = checkForDupes(parseJsonWithComments(updatedText))
  write(args.preload_list_path, updatedText)
  write(args.rejected_domains_path, json.dumps(domainsToReject, indent=2) + "\n")
  if dupes:
    print "\nWARNING\nDuplicate entries:"
    for dupe in dupes:
      print "- %s" % dupe
    print "\nYou'll need to manually deduplicate entries before commiting them to Chromium."
    print "\nNote: if there are a lot of duplicate entries, you may have accidentally run this script twice. Reset your checkout and try again."
  else:
    print "\nSUCCESS\n"

if __name__ == "__main__":
  main()
Python
0
@@ -2769,16 +2769,78 @@ pe=str)%0A + parser.add_argument('--skip_removals', action='store_true')%0A return @@ -3388,16 +3388,70 @@ movals = + %5B%5D%0A if not args.skip_removals:%0A pendingRemovals = getPend
8e880fe763a063f2bd24e9787cb2d7308bbed453
Output debug not info
panoptes/environment/camera_enclosure.py
panoptes/environment/camera_enclosure.py
import datetime
import zmq
import json

from . import monitor

from panoptes.utils import logger, config, messaging, threads, serial

@logger.has_logger
@config.has_config
class CameraEnclosure(monitor.EnvironmentalMonitor):
    """
    Listens to the sensors inside the camera enclosure

    Args:
        messaging (panoptes.messaging.Messaging): A messaging Object for creating new sockets.
    """

    def __init__(self, messaging=None, connect_on_startup=False):
        super().__init__(messaging=messaging, name='CameraEnclosure')

        # Get the class for getting data from serial sensor
        self.serial_port = self.config.get('camera_box').get('serial_port', '/dev/ttyACM0')
        self.messaging_port = self.config.get('camera_box').get('messaging_port', 6500)
        self.channel = self.config.get('camera_box').get('channel', 'camera_box')

        try:
            self.serial_reader = serial.SerialData(port=self.serial_port, threaded=True)
        except:
            self.logger.warning("Cannot connect to CameraEnclosure")

        self.socket = self.messaging.create_publisher(port=self.messaging_port)

        if connect_on_startup:
            try:
                self.serial_reader.connect()
            except:
                self.logger.warning("Cannot connect to CameraEnclosure via serial port")

            try:
                self.start_monitoring()
            except:
                self.logger.warning("Problem starting serial monitor")

    def monitor(self):
        """ Gets the next reading from the sensors in the camera enclosure """
        sensor_data = self.get_reading()
        self.logger.info("camera_box: {}".format(sensor_data))

        for key, value in sensor_data.items():
            sensor_string = '{} {}'.format(key, value)
            self.send_message(sensor_string)

    def get_reading(self):
        """Get the serial reading from the sensor"""
        # take the current serial sensor information
        return self._prepare_sensor_data()

    def _prepare_sensor_data(self):
        """Helper function to return serial sensor info"""
        self.sensor_value = self.serial_reader.next()

        sensor_data = dict()
        if len(self.sensor_value) > 0:
            try:
                sensor_data = json.loads(self.sensor_value)
            except ValueError:
                print("Bad JSON: {0}".format(self.sensor_value))

        return sensor_data
Python
0.999802
@@ -1654,20 +1654,21 @@ .logger. -info +debug (%22camera
0a1700b64a2e496217dd0531ebe8326410fd6cdc
Update yamldumper.py
salt/utils/yamldumper.py
salt/utils/yamldumper.py
# -*- coding: utf-8 -*-
'''
    salt.utils.yamldumper
    ~~~~~~~~~~~~~~~~~~~~~

'''

from __future__ import absolute_import
try:
    from yaml import CDumper as Dumper
except ImportError:
    from yaml import CDumper as Dumper

from salt.utils.odict import OrderedDict


class OrderedDumper(Dumper):
    '''
    A YAML dumper that represents python OrderedDict as simple YAML map.
    '''
    pass


def represent_ordereddict(dumper, data):
    return dumper.represent_dict(data.items())


OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
Python
0
@@ -199,35 +199,24 @@ yaml import -CDumper as Dumper%0A%0Afrom
91f54451fd149506abe57e31f45bc841f9e031ca
Fix unstoppable streaming
camerav4.py
camerav4.py
import picamera
from picamera import PiCamera
import time
from datetime import datetime
import os.path
from subprocess32 import Popen

print "\nSecurity Camera Logger v4 | Ben Broce & William Hampton\n"
print "Streams video to rtsp://pi-ip:8554/ | Captures to pics/[timestamp].jpg"
print "Ctrl-C quits.\n"

stream = raw_input("Should I stream video or take pictures (v/p)? ")

print "Running..."

#http://www.raspberry-projects.com/pi/pi-hardware/raspberry-pi-camera/streaming-video-using-vlc-player
#http://www.diveintopython.net/scripts_and_streams/stdin_stdout_stderr.html
#Ouput video (record) => stream => stdout => | => cvlc livestream => browser
if (stream == "v" or stream == "V"):
	Popen(["./livestream.sh"])
elif (stream == "p" or stream == "P"):
	length = float(raw_input("How long should I run (in minutes): "))*60
	interval = float(raw_input("How often should I take a picture (in seconds): "))
	camera = PiCamera()
	camera.annotate_background = picamera.Color('black')
	camera.rotation = 180
	camera.resolution = (640, 480)
	counter = 0
	
	try:
		camera.start_preview()
		while (counter <= length):
			timestamp = datetime.now().strftime("%m-%d-%Y_%H:%M:%S")
			camera.annotate_text = timestamp
			path = 'pics/' + timestamp + '.jpg'
			camera.capture(path, use_video_port=True)
			time.sleep(interval)
			counter += interval
	finally:
		print "Exiting..."
		camera.stop_preview()
else:
	print "Invalid input!"
Python
0.000013
@@ -368,16 +368,84 @@ v/p)? %22) +%0Apreview = raw_input(%22Should I display video preview on Pi (y/n)? %22) %0A%0Aprint @@ -752,16 +752,23 @@ = %22V%22):%0A +%09try:%0A%09 %09Popen(%5B @@ -787,16 +787,71 @@ m.sh%22%5D)%0A +%09finally:%0A%09%09print %22%5Cn%5CnExiting...%22%0A%09%09Popen.terminate()%0A elif (st @@ -1187,16 +1187,58 @@ %09%0A%09try:%0A +%09%09if (preview == %22y%22 or preview == %22Y%22):%0A%09 %09%09camera @@ -1254,16 +1254,16 @@ eview()%0A - %09%09while @@ -1541,16 +1541,58 @@ ing...%22%0A +%09%09if (preview == %22y%22 or preview == %22Y%22):%0A%09 %09%09camera
aecf2a62d5e6a13dbe906d0d8cd3a2ddd554a21c
version bump 1.1.0 - Increase number of blacklisted domains
disposable_email_checker/__init__.py
disposable_email_checker/__init__.py
__version__ = '1.0.0'
Python
0
@@ -10,13 +10,13 @@ __ = '1. -0 +1 .0'%0A
59c193cdffa46a247e918a93b86c16fe050e27e9
migrate method to new api
l10n_br_delivery/sale.py
l10n_br_delivery/sale.py
# -*- coding: utf-8 -*-
###############################################################################
#                                                                             #
# Copyright (C) 2009  Renato Lima - Akretion                                  #
#                                                                             #
#This program is free software: you can redistribute it and/or modify        #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or           #
#(at your option) any later version.                                         #
#                                                                             #
#This program is distributed in the hope that it will be useful,             #
#but WITHOUT ANY WARRANTY; without even the implied warranty of              #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
#GNU Affero General Public License for more details.                         #
#                                                                             #
#You should have received a copy of the GNU Affero General Public License    #
#along with this program. If not, see <http://www.gnu.org/licenses/>.        #
###############################################################################

import time

from openerp import models, api, _
from openerp.exceptions import except_orm


class SaleOrder(models.Model):
    _inherit = 'sale.order'

    # TODO migrate to new API
    def _prepare_invoice(self, cr, uid, order, lines, context=None):
        """Prepare the dict of values to create the new invoice for a
           sale order. This method may be overridden to implement custom
           invoice generation (making sure to call super() to establish
           a clean extension chain).

           :param browse_record order: sale.order record to invoice
           :param list(int) line: list of invoice line IDs that must be
                                  attached to the invoice
           :return: dict of value to create() the invoice
        """
        result = super(SaleOrder, self)._prepare_invoice(
            cr, uid, order, lines, context)

        if order.carrier_id:
            result['carrier_id'] = order.carrier_id.id

        return result

    #TODO Inplement this in object stock.move method _picking_assign
    #def _prepare_order_picking(self, cr, uid, order, context=None):
    #    result = super(SaleOrder, self)._prepare_order_picking(
    #        cr, uid, order, context)
    #
    #    # FIXME - Confirmado bug do OpenERP
    #    # https://bugs.launchpad.net/bugs/1161138
    #    # Esse campo já deveria ser copiado pelo módulo nativo delivery
    #    result['incoterm'] = order.incoterm and order.incoterm.id or False
    #    return result

    # TODO migrate to new API
    def delivery_set(self, cr, uid, ids, context=None):

        #Copia do modulo delivery
        #Exceto pelo final que adiciona ao campo total do frete.

        grid_obj = self.pool.get('delivery.grid')
        carrier_obj = self.pool.get('delivery.carrier')
        for order in self.browse(cr, uid, ids, context=context):
            grid_id = carrier_obj.grid_get(cr, uid, [order.carrier_id.id],
                                           order.partner_shipping_id.id)

            if not grid_id:
                raise except_orm(_('No Grid Available!'),
                                 _('No grid matching for this carrier!'))

            if not order.state in ('draft'):
                raise except_orm(_('Order not in Draft State!'),
                                 _('The order state have to be draft to add delivery lines.'))

            grid = grid_obj.browse(cr, uid, grid_id, context=context)

            amount_freight = grid_obj.get_price(cr, uid, grid.id, order,
                                                time.strftime('%Y-%m-%d'),
                                                context)
            self.onchange_amount_freight(cr, uid, ids, amount_freight)
        return self.write(cr, uid, ids, {'amount_freight': amount_freight})
Python
0.000001
@@ -1538,33 +1538,18 @@ -# TODO migrate to new API +@api.model %0A @@ -1571,33 +1571,24 @@ nvoice(self, - cr, uid, order, line @@ -1588,30 +1588,16 @@ r, lines -, context=None ):%0A @@ -2173,30 +2173,8 @@ ice( -%0A cr, uid, orde @@ -2181,25 +2181,16 @@ r, lines -, context )%0A%0A
643b8e034f6bdcc2d863f0dda99fa91b1eecb54c
Update __init__.py
corner/__init__.py
corner/__init__.py
# -*- coding: utf-8 -*-

__version__ = "2.0.2.dev0"
__author__ = "Dan Foreman-Mackey (foreman.mackey@gmail.com)"
__copyright__ = "Copyright 2013-2016 Daniel Foreman-Mackey and contributors"
__contributors__ = [
    # Alphabetical by first name.
    "Adrian Price-Whelan @adrn",
    "Brendon Brewer @eggplantbren",
    "Brigitta Sipocz @bsipocz",
    "Ekta Patel @ekta1224",
    "Emily Rice @emilurice",
    "Geoff Ryan @geoffryan",
    "Guillaume @ceyzeriat",
    "Gregory Ashton @ga7g08",
    "Hanno Rein @hannorein",
    "Kelle Cruz @kelle",
    "Kyle Barbary @kbarbary",
    "Marco Tazzari @mtazzari",
    "Matt Pitkin @mattpitkin",
    "Phil Marshall @drphilmarshall",
    "Pierre Gratier @pirg",
    "Stephan Hoyer @shoyer",
    "Víctor Zabalza @zblz",
    "Will Vousden @willvousden",
    "Wolfgang Kerzendorf @wkerzendorf",
]

__bibtex__ = """@article{corner,
    Author = {Daniel Foreman-Mackey},
    Doi = {10.21105/joss.00024},
    Title = {corner.py: Scatterplot matrices in Python},
    Journal = {The Journal of Open Source Software},
    Year = 2016,
    Volume = 24,
    Url = {http://dx.doi.org/10.5281/zenodo.45906}
}"""

try:
    __CORNER_SETUP__
except NameError:
    __CORNER_SETUP__ = False

if not __CORNER_SETUP__:
    __all__ = ["corner", "hist2d", "quantile"]
    from .corner import corner, hist2d, quantile
Python
0.000072
@@ -508,24 +508,51 @@ hannorein%22,%0A + %22Jeremy Heyl @jsheyl%22,%0A %22Kelle C
14efcc349a3b524345808eaf925399bede34c7c6
make file pep8 compliant
binstar_client/errors.py
binstar_client/errors.py
from clyent.errors import ClyentError

class BinstarError(ClyentError):

    def __init__(self, *args, **kwargs):
        Exception.__init__(self, *args, **kwargs)
        if not hasattr(self, 'message'):
            self.message = args[0] if args else None

class Unauthorized(BinstarError):
    pass

class Conflict(BinstarError):
    pass

class NotFound(BinstarError, IndexError):

    def __init__(self, *args, **kwargs):
        BinstarError.__init__(self, *args, **kwargs)
        IndexError.__init__(self, *args, **kwargs)
        self.message = args[0]

class UserError(BinstarError):
    pass

class ServerError(BinstarError):
    pass

class ShowHelp(BinstarError):
    pass

class NoMetadataError(BinstarError):
    pass

class NotebookNotExist(BinstarError):
    def __init__(self, notebook):
        msg = "{} does not exist.".format(notebook)
        self.notebook = notebook
        super(BinstarError, self).__init__(msg)
Python
0
@@ -32,16 +32,17 @@ tError%0A%0A +%0A class Bi @@ -58,33 +58,32 @@ r(ClyentError):%0A -%0A def __init__ @@ -253,16 +253,17 @@ e None%0A%0A +%0A class Un @@ -294,24 +294,25 @@ :%0A pass%0A%0A +%0A class Confli @@ -335,24 +335,25 @@ :%0A pass%0A%0A +%0A class NotFou @@ -382,17 +382,16 @@ Error):%0A -%0A def @@ -560,16 +560,17 @@ rgs%5B0%5D%0A%0A +%0A class Us @@ -602,16 +602,17 @@ pass%0A%0A +%0A class Se
600fbdaff54206aaed93e775011b5dcfb054b83c
use url() for /apps urls so we can link to them
apps/marketplace/urls.py
apps/marketplace/urls.py
from django.conf.urls.defaults import *

from views import marketplace, partners

urlpatterns = patterns('',
    (r'^$', marketplace),
    (r'^partners/$', partners),
)
Python
0
@@ -37,46 +37,20 @@ t *%0A -from views import marketplace, partner +%0Aimport view s%0A%0Au @@ -83,16 +83,19 @@ +url (r'%5E$', mark @@ -90,16 +90,22 @@ (r'%5E$', +views. marketpl @@ -111,15 +111,38 @@ lace +, name='marketplace' ),%0A +url (r'%5E @@ -154,21 +154,27 @@ ers/$', +views. partners ),%0A)%0A @@ -168,13 +168,29 @@ partners -) , + name='partners') %0A)%0A
38db6404a7f40bc86585c614fc8cbe6691eafb89
update doc
birdy/native/__init__.py
birdy/native/__init__.py
""" The :func:`import_wps` function *imports* on the fly a python module whose functions call WPS processes. The module is generated at runtime based on the process description provided by the WPS server. Calling a function sends an `execute` request to the server, which returns a response. The response is parsed to convert the outputs in native python whenever possible. `LiteralOutput` objects (string, float, integer, boolean) are automatically converted to their native format. For `ComplexOutput`, the module can either return a link to the output files stored on the server (default), or try to convert the outputs to a python object based on their mime type. So for example, if the mime type is 'application/json', the module would read the remote output file and `json.loads` it to return a `dict`. The behavior of the module can be configured using the :class:`config`, see its docstring for more information. Example ------- If a WPS server with a simple `hello` process is running on the local host on port 5000:: >>> from birdy import import_wps >>> emu = import_wps('http://localhost:5000/') >>> emu.hello <function birdy.native.hello(name)> >>> print(emu.hello.__doc__) "" Just says a friendly Hello. Returns a literal string output with Hello plus the inputed name. Parameters ---------- name : string Please enter your name. Returns ------- output : string A friendly Hello from us. "" # Call the function >>> emu.hello('stranger') 'Hello stranger' """ from .client import BirdyClient, import_wps
Python
0
@@ -24,75 +24,48 @@ ps%60 -function *imports* on the fly a python module whose%0Afunction +instantiates a class whose method s call - +%0A WPS @@ -84,16 +84,18 @@ he m -odule is +ethods are gen
270812e89e8e0870bfea01367cf645cf5194a806
Add sql constraint identation fixed
openacademy/model/openacademy_course.py
openacademy/model/openacademy_course.py
# -*- coding: utf-8 -*-
from openerp import models, fields, api

'''
This module create model of Course
'''


class Course(models.Model):
    '''This class create model of Course'''
    _name = 'openacademy.course'  # Model odoo name

    name = fields.Char(string='Title', required=True)  # field reserved to identified rec_name
    description = fields.Text(string='Description')

    responsible_id = fields.Many2one('res.users',
                                     ondelete='set null',
                                     string="Responsible", index=True)

    session_ids = fields.One2many('openacademy.session', 'course_id',
                                  string="Sessions")

    @api.multi
    def copy(self, default=None):
        default = dict(default or {})

        copied_count = self.search_count(
            [('name', '=like', u"Copy of {}%".format(self.name))])
        if not copied_count:
            new_name = u"Copy of {}".format(self.name)
        else:
            new_name = u"Copy of {} ({})".format(self.name, copied_count)

        default['name'] = new_name
        return super(Course, self).copy(default)

    _sql_constraints = [
        ('name_description_check',
          'CHECK(name != description)',
          "The title of the course should not be the description"),

        ('name_unique',
          'UNIQUE(name)',
          "The course title must be unique"),
    ]
Python
0
@@ -1202,17 +1202,16 @@ - 'CHECK(n @@ -1228,25 +1228,24 @@ cription)',%0A - %22The @@ -1331,17 +1331,16 @@ - 'UNIQUE( @@ -1343,25 +1343,24 @@ QUE(name)',%0A - %22The
80a58ea42892af72bac791e02f75fb1c9e11394e
handle piping errors
motmot/FlyMovieFormat/fmfcat.py
motmot/FlyMovieFormat/fmfcat.py
from optparse import OptionParser
import sys

import motmot.FlyMovieFormat.FlyMovieFormat as FMF
import numpy
import fcntl, os

if 1:
    import signal
    # Restore the default SIGPIPE handler (Python sets a handler to
    # raise an exception).
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

def encode_plane( frame, color=False ):
    if not color:
        buf = frame.tostring()
    else:
        # 420
        # See IMC1 at http://msdn.microsoft.com/en-us/library/windows/desktop/dd206750(v=vs.85).aspx
        h,w = frame.shape
        f2 = numpy.zeros( (h*2,w), dtype=numpy.uint8)
        f2[:h, :] = frame
        f2[h:, :] = 128
        buf = f2.tostring()
    return buf

def doit( filename,
          raten=25, # numerator
          rated=1, # denom
          aspectn = 1, # numerator
          aspectd = 1, # denom
          rotate_180 = False,
          autocrop = False,
          color = False,
          ):
    fmf = FMF.FlyMovie(filename)
    if fmf.get_format() not in ['MONO8','RAW8']:
        raise NotImplementedError('Only MONO8 and RAW8 formats are currently supported.')

    width = fmf.get_width()//(fmf.get_bits_per_pixel()//8)
    height = fmf.get_height()
    if autocrop:
        use_width = (width >> 4) << 4
        use_height = (height >> 4) << 4
        print >> sys.stderr, 'fmfcat autocropping from (%d,%d) to (%d,%d)'%(
            width,height, use_width,use_height)
    else:
        use_width = width
        use_height = height

    Y4M_MAGIC = 'YUV4MPEG2'
    Y4M_FRAME_MAGIC = 'FRAME'

    inter = 'Ip' # progressive
    if not color:
        # Warn about not being in spec? OTOH it works in VLC and
        # Ubuntu Precise mplayer(2), but not Medibuntu Precise
        # mplayer.
        # See http://wiki.multimedia.cx/index.php?title=YUV4MPEG2
        colorspace = 'Cmono'
    else:
        # This is only working in VLC
        colorspace = 'C420'

    out_fd = sys.stdout
    fcntl.fcntl(out_fd.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)

    out_fd.write('%(Y4M_MAGIC)s W%(use_width)d H%(use_height)d '
                 'F%(raten)d:%(rated)d %(inter)s A%(aspectn)d:%(aspectd)d '
                 '%(colorspace)s Xconverted-by-fmfcat\n'%locals())
    while 1:
        try:
            frame,timestamp = fmf.get_next_frame()
        except FMF.NoMoreFramesException, err:
            break

        out_fd.write('%(Y4M_FRAME_MAGIC)s\n'%locals())
        if rotate_180:
            frame = numpy.rot90(numpy.rot90(frame))
        if autocrop:
            frame = frame[:use_height,:use_width]

        buf = encode_plane( frame, color=color )
        out_fd.write(buf)
        out_fd.flush()

def main():
    usage = """%prog FILENAME [options]

Pipe the contents of an .fmf file to stdout in the yuv4mpegpipe
format. This allows an .fmf file to be converted to any format that
ffmpeg supports. For example, to convert the file x.fmf to x.avi using
the MPEG4 codec:

%prog x.fmf > x.y4m
ffmpeg -vcodec msmpeg4v2 -i x.y4m x.avi
"""
    parser = OptionParser(usage)

    parser.add_option('--rotate-180', action='store_true',
                      default=False )

    parser.add_option('--autocrop', action='store_true',
                      default=False )

    parser.add_option('--color', action='store_true',
                      default=False )

    (options, args) = parser.parse_args()
    if len(args) != 1:
        parser.print_help()
        return

    filename = args[0]
    doit( filename = args[0],
          rotate_180 = options.rotate_180,
          autocrop = options.autocrop,
          color = options.color,
          )

if __name__=='__main__':
    main()
Python
0.000001
@@ -118,16 +118,28 @@ cntl, os +%0Aimport time %0A%0Aif 1:%0A @@ -2613,25 +2613,312 @@ -out_fd.write(buf) +while 1:%0A try:%0A out_fd.write(buf)%0A break%0A except IOError, err:%0A if err.errno == 11:%0A print %3E%3E sys.stderr, 'write error, waiting...'%0A time.sleep(0.1)%0A continue%0A raise %0A
658d37fff628a3efac1e7202416ac7495960d4ad
Add translator in script
scripts/replay_notifications.py
scripts/replay_notifications.py
#!/usr/bin/env python
"""
Replay all events in order to create Notification entries to the
database which do not exist yet.
"""

# boilerplate code. copy that
import os
import sys
from argparse import ArgumentParser
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
# /end boilerplate code

from paste.deploy import appconfig

from adhocracy.config.environment import load_environment
from adhocracy.lib.event.notification import notify
from adhocracy.model import meta, Event


def load_config(filename):
    conf = appconfig('config:' + os.path.abspath(filename) + '#content')
    load_environment(conf.global_conf, conf.local_conf)


def parse_args():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument("conf_file", help="configuration to use")
    return parser.parse_args()


def main():
    args = parse_args()
    load_config(args.conf_file)

    all_events = meta.Session.query(Event).all()
    for event in all_events:
        notify(event, database_only=True)
    meta.Session.commit()


if __name__ == '__main__':
    sys.exit(main())
Python
0.000005
@@ -332,16 +332,82 @@ ppconfig +%0Aimport pylons%0Afrom pylons.i18n.translation import _get_translator %0A%0Afrom a @@ -708,16 +708,123 @@ l_conf)%0A + translator = _get_translator(pylons.config.get('lang'))%0A pylons.translator._push_object(translator)%0A %0A%0Adef pa
6a0ee1d994eaaa563301389295c75fa61999791d
Remove unintended kwarg in mesh class
cubix/core/mesh.py
cubix/core/mesh.py
from cubix.core.pycompat import *
from cubix.core import glmath
from cubix.core.opengl import gl, pgl

def buffer_object(data):
    vbo = pgl.glGenBuffers(1)
    gl.glBindBuffer(gl.GL_ARRAY_BUFFER, vbo)
    pgl.glBufferData(gl.GL_ARRAY_BUFFER, data, gl.GL_STATIC_DRAW)
    return vbo

def bind_object(dataLoc, vbo):
    gl.glEnableVertexAttribArray(dataLoc)
    gl.glBindBuffer(gl.GL_ARRAY_BUFFER, vbo)
    pgl.glVertexAttribPointer(dataLoc, 2, gl.GL_FLOAT, gl.GL_FALSE, 0, None)

def unbind_object(dataLoc):
    gl.glBindBuffer(gl.GL_ARRAY_BUFFER, 0)
    gl.glDisableVertexAttribArray(dataLoc)

class Mesh(object):
    def __init__(self, program, texture):
        self.program = program
        self.vertLoc = self.program.get_attribute(b'position')
        self.UVLoc = self.program.get_attribute(b'vertexUV')
        self.modelID = self.program.new_uniform(b'model')

        self.texture = texture

        self.xPos = 0
        self.yPos = 0
        self.xPosDelta = 0
        self.yPosDelta = 0

        self._scale = 1
        self.scaleDelta = 0

        self.modelMatrix = glmath.Matrix(4)

        self.data = [
            [0.0, 1.0],
            [1.0, 1.0],
            [0.0, 0.0],
            [1.0, 0.0],
        ]

        x1, x2, y1, y2 = self.texture.get_uvcoords()[3]

        self.texCoord = [
            [x1, y2],
            [x2, y2],
            [x1, y1],
            [x2, y1],
        ]
        self.nverts = 4

        self.vao = pgl.glGenVertexArrays(1)

        self.buffer_objects()

    def scale(self, value):
        self.scaleDelta = value / self._scale
        self._scale = value

    def translate(self, x, y):
        self.xPosDelta += x - self.xPos
        self.yPosDelta += y - self.yPos
        self.xPos = x
        self.yPos = y

    def update(self):
        if self.scaleDelta:
            vecScale = glmath.Vector(3, data=[self.scaleDelta, self.scaleDelta, 0.0])
            self.modelMatrix.i_scale(vecScale)
            self.scaleDelta = 0

        if self.xPosDelta or self.yPosDelta:
            vecTrans = glmath.Vector(3, data=[self.xPosDelta, self.yPosDelta, 0.0])
            self.modelMatrix.i_translate(vecTrans)
            self.xPosDelta = 0
            self.yPosDelta = 0

    def render(self):
        self.program.set_uniform_matrix(self.modelID, self.modelMatrix)

        gl.glBindVertexArray(self.vao)
        self.texture.bind()

        bind_object(self.vertLoc, self.vbo)
        bind_object(self.UVLoc, self.uvbo)

        gl.glDrawArrays(gl.GL_TRIANGLE_STRIP, 0, self.nverts)

        unbind_object(self.UVLoc)
        unbind_object(self.vertLoc)
        gl.glBindVertexArray(0)

    def buffer_objects(self):
        self.vbo = buffer_object(self.data)
        self.uvbo = buffer_object(self.texCoord)
Python
0
@@ -1241,265 +1241,58 @@ -x1, x2, y1, y2 = self.texture.get_uvcoords()%5B3%5D%0A%0A self.texCoord = %5B%0A %5Bx1, y2%5D,%0A %5Bx2, y2%5D,%0A %5Bx1, y1%5D,%0A %5Bx2, y1%5D,%0A %5D%0A self.nverts = 4%0A%0A self.vao = pgl.glGenVertexArrays(1) +self.texCoord = self.data%0A self.nverts = 4%0A %0A @@ -2145,47 +2145,8 @@ x)%0A%0A - gl.glBindVertexArray(self.vao)%0A
054e2d98a450b75427a7b06c4549373c2f4bc7a3
Remove default id reosolver from open tracing
saleor/core/tracing.py
saleor/core/tracing.py
from functools import partial

from graphene.types.resolver import default_resolver
from graphql import ResolveInfo


def should_trace(info: ResolveInfo) -> bool:
    if info.field_name not in info.parent_type.fields:
        return False
    resolver = info.parent_type.fields[info.field_name].resolver
    return not (
        resolver is None
        or is_default_resolver(resolver)
        or is_introspection_field(info)
    )


def is_introspection_field(info: ResolveInfo):
    if info.path is not None:
        for path in info.path:
            if isinstance(path, str) and path.startswith("__"):
                return True
    return False


def is_default_resolver(resolver):
    while isinstance(resolver, partial):
        resolver = resolver.func
        if resolver is default_resolver:
            return True

    return resolver is default_resolver
Python
0
@@ -24,16 +24,52 @@ artial%0A%0A +from graphene.relay import GlobalID%0A from gra @@ -719,16 +719,81 @@ olver):%0A + default_resolvers = %5Bdefault_resolver, GlobalID.id_resolver%5D%0A whil @@ -871,33 +871,33 @@ if resolver i -s +n default_resolve @@ -897,16 +897,17 @@ resolver +s :%0A @@ -945,17 +945,17 @@ solver i -s +n default @@ -947,25 +947,26 @@ lver in default_resolver +s %0A
984f5cd2d36634de7bd9876c69f6b3e19ae0c1bd
Fix documentation for MySQL instrumentation (#665)
instrumentation/opentelemetry-instrumentation-mysql/src/opentelemetry/instrumentation/mysql/__init__.py
instrumentation/opentelemetry-instrumentation-mysql/src/opentelemetry/instrumentation/mysql/__init__.py
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
MySQL instrumentation supporting `mysql-connector`_, it can be enabled by
using ``MySQLInstrumentor``.

.. _mysql-connector: https://pypi.org/project/mysql-connector/

Usage
-----

.. code:: python

    import mysql.connector
    from opentelemetry.instrumentation.mysql import MySQLInstrumentor

    MySQLInstrumentor().instrument()

    cnx = mysql.connector.connect(database="MySQL_Database")
    cursor = cnx.cursor()
    cursor.execute("INSERT INTO test (testField) VALUES (123)"
    cursor.close()
    cnx.close()

API
---
"""

from typing import Collection

import mysql.connector

from opentelemetry.instrumentation import dbapi
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.mysql.package import _instruments
from opentelemetry.instrumentation.mysql.version import __version__


class MySQLInstrumentor(BaseInstrumentor):
    _CONNECTION_ATTRIBUTES = {
        "database": "database",
        "port": "server_port",
        "host": "server_host",
        "user": "user",
    }

    _DATABASE_SYSTEM = "mysql"

    def instrumentation_dependencies(self) -> Collection[str]:
        return _instruments

    def _instrument(self, **kwargs):
        """Integrate with MySQL Connector/Python library.
        https://dev.mysql.com/doc/connector-python/en/
        """
        tracer_provider = kwargs.get("tracer_provider")

        dbapi.wrap_connect(
            __name__,
            mysql.connector,
            "connect",
            self._DATABASE_SYSTEM,
            self._CONNECTION_ATTRIBUTES,
            version=__version__,
            tracer_provider=tracer_provider,
        )

    def _uninstrument(self, **kwargs):
        """"Disable MySQL instrumentation"""
        dbapi.unwrap_connect(mysql.connector, "connect")

    # pylint:disable=no-self-use
    def instrument_connection(self, connection, tracer_provider=None):
        """Enable instrumentation in a MySQL connection.

        Args:
            connection: The connection to instrument.
            tracer_provider: The optional tracer provider to use. If omitted
                the current globally configured one is used.

        Returns:
            An instrumented connection.
        """
        return dbapi.instrument_connection(
            __name__,
            connection,
            self._DATABASE_SYSTEM,
            self._CONNECTION_ATTRIBUTES,
            version=__version__,
            tracer_provider=tracer_provider,
        )

    def uninstrument_connection(self, connection):
        """Disable instrumentation in a MySQL connection.

        Args:
            connection: The connection to uninstrument.

        Returns:
            An uninstrumented connection.
        """
        return dbapi.uninstrument_connection(connection)
Python
0
@@ -1066,16 +1066,17 @@ S (123)%22 +) %0A cur
53acc35eda9984fdd9eaba78be45afd636c94098
use unicode literals in shell instead of python2-only byte strings
lambda_uploader/shell.py
lambda_uploader/shell.py
# -*- coding: utf-8 -*-
# Copyright 2015-2016 Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""lambda-uploader - Simple way to create and upload python lambda jobs"""

from __future__ import print_function
import sys
import logging
import traceback
import lambda_uploader

from os import getcwd, path, getenv
from lambda_uploader import package, config, uploader, subscribers
from boto3 import __version__ as boto3_version
from botocore import __version__ as botocore_version

LOG = logging.getLogger(__name__)
NAMESPACE = 'rax_jira'
CHECK = '\xe2\x9c\x85'
INTERROBANG = '\xe2\x81\x89\xef\xb8\x8f'
RED_X = '\xe2\x9d\x8c'
LAMBDA = '\xce\xbb'

TRACEBACK_MESSAGE = """%s Unexpected error. Please report this traceback.
Uploader: %s
Botocore: %s
Boto3: %s
"""


# Used for stdout for shell
def _print(txt):
    # Windows Powershell doesn't support Unicode
    if sys.platform == 'win32' or sys.platform == 'cygwin':
        print(txt)
    else:
        # Add the lambda symbol
        print("%s %s" % (LAMBDA, txt))


def _execute(args):
    pth = path.abspath(args.function_dir)

    cfg = config.Config(pth, args.config, role=args.role,
                        variables=args.variables)

    if args.s3_bucket:
        cfg.set_s3(args.s3_bucket, args.s3_key)

    if args.no_virtualenv:
        # specified flag to omit entirely
        venv = False
    elif args.virtualenv:
        # specified a custom virtualenv
        venv = args.virtualenv
    else:
        # build and include virtualenv, the default
        venv = None

    if args.no_build:
        pkg = package.create_package(pth)
    else:
        _print('Building Package')
        requirements = cfg.requirements
        if args.requirements:
            requirements = path.abspath(args.requirements)
        extra_files = cfg.extra_files
        if args.extra_files:
            extra_files = args.extra_files
        pkg = package.build_package(pth, requirements,
                                    venv, cfg.ignore, extra_files,
                                    pyexec=cfg.runtime)

    if not args.no_clean:
        pkg.clean_workspace()

    if not args.no_upload:
        # Set publish if flagged to do so
        if args.publish:
            cfg.set_publish()

        create_alias = False
        # Set alias if the arg is passed
        if args.alias is not None:
            cfg.set_alias(args.alias, args.alias_description)
            create_alias = True

        _print('Uploading Package')
        upldr = uploader.PackageUploader(cfg, args.profile)
        upldr.upload(pkg)

        # If the alias was set create it
        if create_alias:
            upldr.alias()

        if cfg.subscription:
            _print('Creating subscription')
            subscribers.create_subscriptions(cfg, args.profile)

        pkg.clean_zipfile()

    _print('Fin')


def main(arv=None):
    """lambda-uploader command line interface."""
    # Check for Python 2.7 or later
    if sys.version_info[0] < 3 and not sys.version_info[1] == 7:
        raise RuntimeError('lambda-uploader requires Python 2.7 or later')

    import argparse

    parser = argparse.ArgumentParser(
        description='Simple way to create and upload python lambda jobs')
    parser.add_argument('--version', '-v', action='version',
                        version=lambda_uploader.__version__)
    parser.add_argument('--no-upload', dest='no_upload',
                        action='store_const', help='dont upload the zipfile',
                        const=True)
    parser.add_argument('--no-clean', dest='no_clean',
                        action='store_const',
                        help='dont cleanup the temporary workspace',
                        const=True)
    parser.add_argument('--publish', '-p', dest='publish',
                        action='store_const',
                        help='publish an upload to an immutable version',
                        const=True)
    parser.add_argument('--virtualenv', '-e',
                        help='use specified virtualenv instead of making one',
                        default=None)
    parser.add_argument('--extra-files', '-x', action='append',
                        help='include file or directory path in package',
                        default=[])
    parser.add_argument('--no-virtualenv', dest='no_virtualenv',
                        action='store_const',
                        help='do not create or include a virtualenv at all',
                        const=True)
    parser.add_argument('--role', dest='role',
                        default=getenv('LAMBDA_UPLOADER_ROLE'),
                        help=('IAM role to assign the lambda function, '
                              'can be set with $LAMBDA_UPLOADER_ROLE'))
    parser.add_argument('--variables', dest='variables',
                        help='add environment variables')
    parser.add_argument('--profile', dest='profile',
                        help='specify AWS cli profile')
    parser.add_argument('--requirements', '-r', dest='requirements',
                        help='specify a requirements.txt file')
    alias_help = 'alias for published version (WILL SET THE PUBLISH FLAG)'
    parser.add_argument('--alias', '-a', dest='alias',
                        default=None, help=alias_help)
    parser.add_argument('--alias-description', '-m', dest='alias_description',
                        default=None, help='alias description')
    parser.add_argument('--s3-bucket', '-s', dest='s3_bucket',
                        help='S3 bucket to store the lambda function in',
                        default=None)
    parser.add_argument('--s3-key', '-k', dest='s3_key',
                        help='Key name of the lambda function s3 object',
                        default=None)
    parser.add_argument('--config', '-c', help='Overrides lambda.json',
                        default='lambda.json')
    parser.add_argument('function_dir', default=getcwd(), nargs='?',
                        help='lambda function directory')
    parser.add_argument('--no-build', dest='no_build', action='store_const',
                        help='dont build the sourcecode', const=True)

    verbose = parser.add_mutually_exclusive_group()
    verbose.add_argument('-V', dest='loglevel', action='store_const',
                         const=logging.INFO,
                         help="Set log-level to INFO.")
    verbose.add_argument('-VV', dest='loglevel', action='store_const',
                         const=logging.DEBUG,
                         help="Set log-level to DEBUG.")
    parser.set_defaults(loglevel=logging.WARNING)

    args = parser.parse_args()

    logging.basicConfig(level=args.loglevel)

    try:
        _execute(args)
    except Exception:
        print(TRACEBACK_MESSAGE % (INTERROBANG,
                                   lambda_uploader.__version__,
                                   boto3_version,
                                   botocore_version), file=sys.stderr)
        traceback.print_exc()
        sys.stderr.flush()
        sys.exit(1)
Python
0.000003
@@ -1061,20 +1061,9 @@ = ' -%5Cxe2%5Cx9c%5Cx85 +%E2%9C%85 '%0AIN @@ -1079,55 +1079,21 @@ = ' -%5Cxe2%5Cx81%5Cx89%5Cxef%5Cxb8%5Cx8f'%0ARED_X = '%5Cxe2%5Cx9d%5Cx8c +%E2%80%BD'%0ARED_X = '%E2%9D%8C '%0ALA @@ -1104,16 +1104,9 @@ = ' -%5Cxce%5Cxbb +%CE%BB '%0ATR
91449465489ccc71e4d0b5527f0b4b54526b3c02
update comment
python/parameters_tool/strip_comments.py
python/parameters_tool/strip_comments.py
#!/usr/bin/python
## File : strip_comments.py
## Created : <2017-08-03>
## Updated: Time-stamp: <2017-08-03 18:09:41>
## Description :
##    For a block of string, remove useless stuff
##      1. Remove leading whitespace
##      2. Remove tailing whitespace
##      3. Remove any lines start with #
##
## Sample:
##    server_list=$(echo "$server_list" | python ./strip_comments.py)
##-------------------------------------------------------------------
import os, sys

def strip_comment(string):
    string_list = []
    for line in string.split("\n"):
        line = line.strip()
        if line.startswith("#") or line == "":
            continue
        string_list.append(line)
    return "\n".join(string_list)

if __name__ == '__main__':
    string = sys.stdin.read()
    print(strip_comment(string))
## File : strip_comments.py ends
Python
0
@@ -108,13 +108,13 @@ 18: -09:41 +12:22 %3E%0A## @@ -310,16 +310,226 @@ Sample:%0A +## export server_list=%22# server ip%0A## %0A## ## APP%0A## 138.68.52.73:22%0A## ## loadbalancer%0A## #138.68.254.56:2711%0A## #138.68.254.215:2712%22%0A ## se @@ -590,16 +590,53 @@ nts.py)%0A +## server_list: %22138.68.52.73:22%22%0A ##------
df6b13a70241b616f49d4dcc25073084c371f5b1
Swap out license with rights
share/models/creative/base.py
share/models/creative/base.py
from django.db import models

from share.models.base import ShareObject
from share.models.people import Person
from share.models.base import TypedShareObjectMeta
from share.models.creative.meta import Venue, Institution, Funder, Award, Tag
from share.models.fields import ShareForeignKey, ShareManyToManyField


class AbstractCreativeWork(ShareObject, metaclass=TypedShareObjectMeta):
    title = models.TextField()
    description = models.TextField()
    contributors = ShareManyToManyField(Person, through='Contributor')
    institutions = ShareManyToManyField(Institution, through='ThroughInstitutions')
    venues = ShareManyToManyField(Venue, through='ThroughVenues')
    funders = ShareManyToManyField(Funder, through='ThroughFunders')
    awards = ShareManyToManyField(Award, through='ThroughAwards')
    subject = ShareForeignKey(Tag, related_name='subjected_%(class)s', null=True)
    # Note: Null allows inserting of None but returns it as an empty string
    tags = ShareManyToManyField(Tag, related_name='tagged_%(class)s', through='ThroughTags')
    created = models.DateTimeField(null=True)
    published = models.DateTimeField(null=True)
    free_to_read_type = models.URLField(blank=True)
    free_to_read_date = models.DateTimeField(null=True)
    rights = models.TextField()
    language = models.TextField()


class CreativeWork(AbstractCreativeWork):
    pass
Python
0
@@ -1251,24 +1251,25 @@ (null=True)%0A +%0A rights = @@ -1282,24 +1282,45 @@ s.TextField( +blank=True, null=True )%0A langua @@ -1341,16 +1341,37 @@ xtField( +blank=True, null=True )%0A%0A%0Aclas
a12ed7d9b2517872eef314551481f993a0779f77
Use correct version number in fetch_bnf_codes
openprescribing/pipeline/management/commands/fetch_bnf_codes.py
openprescribing/pipeline/management/commands/fetch_bnf_codes.py
from argparse import RawTextHelpFormatter
import datetime
import glob
import os
import zipfile

from lxml import html
import requests
from tqdm import tqdm

from django.conf import settings
from django.core.management import BaseCommand

from openprescribing.utils import mkdir_p


class Command(BaseCommand):
    help = '''
This command downloads the latest BNF codes.

The BNF codes are in a compressed CSV file that is on a site that is
protected by a captcha.  To download the file, you will need to solve the
captcha in your browser.  This will set a cookie in your browser which you
will need to pass to this command.

Specifically, you should:

 * Visit https://apps.nhsbsa.nhs.uk/infosystems/data/showDataSelector.do?reportId=126 in your browser
 * Solve the captcha and click on "Guest Login"
 * Copy the value of the JSESSIONID cookie
   * In Chrome, this can be found in the Application tab of Developer Tools
 * Run `./manage.py fetch_bnf_codes [cookie]`
    '''.strip()

    def create_parser(self, *args, **kwargs):
        parser = super(Command, self).create_parser(*args, **kwargs)
        parser.formatter_class = RawTextHelpFormatter
        return parser

    def add_arguments(self, parser):
        parser.add_argument('jsessionid')

    def handle(self, *args, **kwargs):
        path = os.path.join(settings.PIPELINE_DATA_BASEDIR, 'bnf_codes')
        year_and_month = datetime.date.today().strftime('%Y_%m')
        dir_path = os.path.join(path, year_and_month)
        mkdir_p(dir_path)
        zip_path = os.path.join(dir_path, 'download.zip')

        base_url = 'https://apps.nhsbsa.nhs.uk/infosystems/data/'

        session = requests.Session()
        session.cookies['JSESSIONID'] = kwargs['jsessionid']

        url = base_url + 'showDataSelector.do'
        params = {'reportId': '126'}
        rsp = session.get(url, params=params)

        tree = html.fromstring(rsp.content)
        options = tree.xpath('//select[@id="bnfVersion"]/option')
        year_to_bnf_version = {}
        for option in options:
            datestamp, version = option.text.split(' : ')
            date = datetime.datetime.strptime(datestamp, '%d-%m-%Y')
            year_to_bnf_version[date.year] = version

        year = max(year_to_bnf_version)
        version = year_to_bnf_version[year]

        url = base_url + 'requestSelectedDownload.do'
        params = {
            'bnfVersion': '68',
            'filePath': '',
            'dataView': '260',
            'format': '',
            'defaultReportIdDataSel': '',
            'reportId': '126',
            'action': 'checkForAvailableDownload',
        }

        rsp = session.get(url, params=params)
        request_id = rsp.json()['requestNo']

        url = base_url + 'downloadAvailableReport.zip'
        params = {
            'requestId': request_id,
        }

        rsp = session.post(url, params=params, stream=True)

        total_size = int(rsp.headers['content-length'])

        with open(zip_path, 'wb') as f:
            tqdm_iterator = tqdm(
                rsp.iter_content(32 * 1024),
                total=total_size,
                unit='B',
                unit_scale=True
            )
            for block in tqdm_iterator:
                f.write(block)

        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(dir_path)

        csv_paths = glob.glob(os.path.join(dir_path, '*.csv'))
        assert len(csv_paths) == 1

        os.rename(csv_paths[0], os.path.join(dir_path, 'bnf_codes.csv'))
Python
0.000009
@@ -2421,12 +2421,15 @@ n': -'68' +version ,%0A
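The one-word diff above swaps a hard-coded '68' for the version variable that the surrounding code already derives from the page's option elements. The selection logic in isolation, with made-up mapping values:

# Pick the BNF version belonging to the most recent year, then send it
# instead of a literal '68'. The dictionary contents are illustrative.
year_to_bnf_version = {2016: '67', 2017: '69'}
year = max(year_to_bnf_version)
version = year_to_bnf_version[year]
params = {'bnfVersion': version, 'reportId': '126'}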
5404936d559104bfdce982c3ff8e75be83aafce9
fix docstring in focal loss (#1878)
tensorflow_addons/losses/focal_loss.py
tensorflow_addons/losses/focal_loss.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Implements Focal loss.""" import tensorflow as tf import tensorflow.keras.backend as K from tensorflow_addons.utils.keras_utils import LossFunctionWrapper from tensorflow_addons.utils.types import FloatTensorLike, TensorLike from typeguard import typechecked @tf.keras.utils.register_keras_serializable(package="Addons") class SigmoidFocalCrossEntropy(LossFunctionWrapper): """Implements the focal loss function. Focal loss was first introduced in the RetinaNet paper (https://arxiv.org/pdf/1708.02002.pdf). Focal loss is extremely useful for classification when you have highly imbalanced classes. It down-weights well-classified examples and focuses on hard examples. The loss value is much high for a sample which is misclassified by the classifier as compared to the loss value corresponding to a well-classified example. One of the best use-cases of focal loss is its usage in object detection where the imbalance between the background class and other classes is extremely high. Usage: ```python fl = tfa.losses.SigmoidFocalCrossEntropy() loss = fl( [[0.97], [0.91], [0.03]], [[1.0], [1.0], [0.0]]) print('Loss: ', loss.numpy()) # Loss: [0.00010971, 0.0032975, 0.00030611] ``` Usage with tf.keras API: ```python model = tf.keras.Model(inputs, outputs) model.compile('sgd', loss=tf.keras.losses.SigmoidFocalCrossEntropy()) ``` Args alpha: balancing factor, default value is 0.25 gamma: modulating factor, default value is 2.0 Returns: Weighted loss float `Tensor`. If `reduction` is `NONE`, this has the same shape as `y_true`; otherwise, it is scalar. Raises: ValueError: If the shape of `sample_weight` is invalid or value of `gamma` is less than zero """ @typechecked def __init__( self, from_logits: bool = False, alpha: FloatTensorLike = 0.25, gamma: FloatTensorLike = 2.0, reduction: str = tf.keras.losses.Reduction.NONE, name: str = "sigmoid_focal_crossentropy", ): super().__init__( sigmoid_focal_crossentropy, name=name, reduction=reduction, from_logits=from_logits, alpha=alpha, gamma=gamma, ) @tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def sigmoid_focal_crossentropy( y_true: TensorLike, y_pred: TensorLike, alpha: FloatTensorLike = 0.25, gamma: FloatTensorLike = 2.0, from_logits: bool = False, ) -> tf.Tensor: """ Args y_true: true targets tensor. y_pred: predictions tensor. alpha: balancing factor. gamma: modulating factor. Returns: Weighted loss float `Tensor`. If `reduction` is `NONE`,this has the same shape as `y_true`; otherwise, it is scalar. 
""" if gamma and gamma < 0: raise ValueError("Value of gamma should be greater than or equal to zero") y_pred = tf.convert_to_tensor(y_pred) y_true = tf.convert_to_tensor(y_true, dtype=y_pred.dtype) # Get the cross_entropy for each entry ce = K.binary_crossentropy(y_true, y_pred, from_logits=from_logits) # If logits are provided then convert the predictions into probabilities if from_logits: pred_prob = tf.sigmoid(y_pred) else: pred_prob = y_pred p_t = (y_true * pred_prob) + ((1 - y_true) * (1 - pred_prob)) alpha_factor = 1.0 modulating_factor = 1.0 if alpha: alpha = tf.convert_to_tensor(alpha, dtype=K.floatx()) alpha_factor = y_true * alpha + (1 - y_true) * (1 - alpha) if gamma: gamma = tf.convert_to_tensor(gamma, dtype=K.floatx()) modulating_factor = tf.pow((1.0 - p_t), gamma) # compute the final loss and return return tf.reduce_sum(alpha_factor * modulating_factor * ce, axis=-1)
Python
0
@@ -1810,30 +1810,36 @@ -%5B%5B0.97%5D, %5B0.91 +y_true = %5B%5B1.0%5D, %5B1.0 %5D, %5B0.0 -3 %5D%5D,%0A @@ -1844,35 +1844,47 @@ ,%0A -%5B%5B1.0%5D, %5B1.0 +y_pred = %5B%5B0.97%5D, %5B0.91 %5D, %5B0.0 +3 %5D%5D)%0A @@ -1927,18 +1927,21 @@ s: %5B -0.00010971 +6.8532745e-06 ,%0A @@ -1986,17 +1986,21 @@ -0.0032975 +1.9097870e-04 ,%0A @@ -2045,18 +2045,21 @@ -0.00030611 +2.0559824e-05 %5D%0A
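The corrected docstring values can be checked by hand from the formula the module implements, FL = -alpha_t * (1 - p_t)**gamma * log(p_t). A small pure-Python verification:

# Per-example sigmoid focal loss with the module's defaults
# (alpha=0.25, gamma=2.0); reproduces the numbers in the fixed docstring.
import math

def focal(y_true, p, alpha=0.25, gamma=2.0):
    p_t = y_true * p + (1 - y_true) * (1 - p)
    alpha_t = y_true * alpha + (1 - y_true) * (1 - alpha)
    return -alpha_t * (1 - p_t) ** gamma * math.log(p_t)

print(focal(1.0, 0.97))  # ~6.8532745e-06
print(focal(1.0, 0.91))  # ~1.9097870e-04
print(focal(0.0, 0.03))  # ~2.0559824e-05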
a7cd6209bd1b975fcc26bba337b7d14c7a4749d3
Apply patch for django<1.11
django_elastipymemcache/memcached.py
django_elastipymemcache/memcached.py
""" Backend for django cache """ import socket from functools import wraps from django.core.cache import InvalidCacheBackendError from django.core.cache.backends.memcached import BaseMemcachedCache from . import client as pyMemcache_client from .cluster_utils import get_cluster_info def invalidate_cache_after_error(f): """ catch any exception and invalidate internal cache with list of nodes """ @wraps(f) def wrapper(self, *args, **kwds): try: return f(self, *args, **kwds) except Exception: self.clear_cluster_nodes_cache() raise return wrapper class ElastiPyMemCache(BaseMemcachedCache): """ backend for Amazon ElastiCache (memcached) with auto discovery mode it used pyMemcache """ def __init__(self, server, params): super(ElastiPyMemCache, self).__init__( server, params, library=pyMemcache_client, value_not_found_exception=ValueError) if len(self._servers) > 1: raise InvalidCacheBackendError( 'ElastiCache should be configured with only one server ' '(Configuration Endpoint)') if len(self._servers[0].split(':')) != 2: raise InvalidCacheBackendError( 'Server configuration should be in format IP:port') self._ignore_cluster_errors = self._options.get( 'ignore_exc', False) def clear_cluster_nodes_cache(self): """clear internal cache with list of nodes in cluster""" if hasattr(self, '_client'): del self._client def get_cluster_nodes(self): """ return list with all nodes in cluster """ server, port = self._servers[0].split(':') try: return get_cluster_info( server, port, self._ignore_cluster_errors )['nodes'] except (socket.gaierror, socket.timeout) as err: raise Exception('Cannot connect to cluster {} ({})'.format( self._servers[0], err )) @property def _cache(self): if getattr(self, '_client', None) is None: self._client = self._lib.Client( self.get_cluster_nodes(), **self._options) return self._client @invalidate_cache_after_error def get(self, *args, **kwargs): return super(ElastiPyMemCache, self).get(*args, **kwargs) @invalidate_cache_after_error def get_many(self, *args, **kwargs): return super(ElastiPyMemCache, self).get_many(*args, **kwargs) @invalidate_cache_after_error def set(self, *args, **kwargs): return super(ElastiPyMemCache, self).set(*args, **kwargs) @invalidate_cache_after_error def set_many(self, *args, **kwargs): return super(ElastiPyMemCache, self).set_many(*args, **kwargs) @invalidate_cache_after_error def delete(self, *args, **kwargs): return super(ElastiPyMemCache, self).delete(*args, **kwargs)
Python
0
@@ -1355,16 +1355,96 @@ port')%0A%0A + # Patch for django%3C1.11%0A self._options = self._options or dict()%0A
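The patch inserts a one-line guard after the server-format checks, presumably because older Django leaves the backend's _options as None when no OPTIONS are configured. The failure mode and fix, sketched with plain values:

# Hedged sketch of the guard added above: if _options is None, calling
# .get(...) on it raises AttributeError; normalizing to a dict first keeps
# the later self._options.get('ignore_exc', False) call safe.
options = None            # what an older Django may hand the backend
options = options or dict()
ignore_cluster_errors = options.get('ignore_exc', False)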
a8b6e8579ed886fd7f78f6ad8db3f112dac73816
Use self.settings() context manager in tests
arcutils/tests/test_templatetags.py
arcutils/tests/test_templatetags.py
from django.conf import settings from django.http import HttpRequest from django.template import Context, Template from django.test import TestCase from arcutils.templatetags.arc import cdn_url, google_analytics class TestCDNURLTag(TestCase): def test_cdn_url_has_no_scheme_by_default(self): self.assertEqual(cdn_url('/x/y/z'), '//cdn.research.pdx.edu/x/y/z') def test_leading_slash_is_irrelevant(self): self.assertEqual(cdn_url('/x/y/z'), '//cdn.research.pdx.edu/x/y/z') self.assertEqual(cdn_url('x/y/z'), '//cdn.research.pdx.edu/x/y/z') def test_with_explicit_scheme(self): self.assertEqual(cdn_url('/x/y/z', scheme='http'), 'http://cdn.research.pdx.edu/x/y/z') def test_integration(self): template = Template('{% load arc %}{% cdn_url "/x/y/z" %}') request = HttpRequest() output = template.render(Context({'request': request})) self.assertEqual(output, '//cdn.research.pdx.edu/x/y/z') def test_integration_with_scheme(self): template = Template('{% load arc %}{% cdn_url "/x/y/z" scheme="http" %}') request = HttpRequest() output = template.render(Context({'request': request})) self.assertEqual(output, 'http://cdn.research.pdx.edu/x/y/z') class TestGoogleAnalyticsTag(TestCase): tracking_id = 'UA-XXXXX-Y' def test_ga_script_tag_is_returned_with_defaults(self): output = google_analytics(self.tracking_id).strip() self.assertTrue(output.startswith('<script>')) self.assertTrue(output.endswith('</script>')) self.assertIn(self.tracking_id, output) output_lower = output.lower() self.assertRegex(output_lower, r'%s.+//\s+tracking id' % self.tracking_id.lower()) self.assertRegex(output_lower, r'auto.+//\s+cookie domain') self.assertRegex(output_lower, r'undefined.+//\s+tracker name') self.assertRegex(output_lower, r'undefined.+//\s+fields') def test_ga_script_tag_is_returned_with_options(self): output = google_analytics( self.tracking_id, cookie_domain='example.com', tracker_name='example', fields={'example': 'example'}, ).strip() self.assertTrue(output.startswith('<script>')) self.assertTrue(output.endswith('</script>')) self.assertIn(self.tracking_id, output) output_lower = output.lower() self.assertRegex(output_lower, r'example\.com.+//\s+cookie domain') self.assertRegex(output_lower, r'example.+//\s+tracker name') self.assertRegex(output_lower, r'\{"example": "example"\}.+//\s+fields') def test_html_placeholder_is_returned_in_debug_mode(self): debug = settings.DEBUG settings.DEBUG = True output = google_analytics(self.tracking_id).strip() self.assertTrue(output.startswith('<!--')) self.assertTrue(output.endswith('-->')) self.assertNotIn(self.tracking_id, output) settings.DEBUG = debug def test_html_placeholder_is_returned_when_no_tracking_id_specified(self): debug = settings.DEBUG settings.DEBUG = False output = google_analytics() self.assertTrue(output.startswith('<!--')) self.assertTrue(output.endswith('-->')) self.assertNotIn(self.tracking_id, output) settings.DEBUG = debug
Python
0.000001
@@ -1,37 +1,4 @@ -from django.conf import settings%0A from @@ -2677,47 +2677,26 @@ -debug = settings.DEBUG%0A +with self. settings .DEB @@ -2695,22 +2695,26 @@ ings -. +( DEBUG - = += True -%0A +):%0A @@ -2769,32 +2769,36 @@ strip()%0A + self.assertTrue( @@ -2824,32 +2824,36 @@ %3C!--'))%0A + self.assertTrue( @@ -2876,32 +2876,36 @@ '--%3E'))%0A + + self.assertNotIn @@ -2934,39 +2934,8 @@ put) -%0A settings.DEBUG = debug %0A%0A @@ -3023,47 +3023,26 @@ -debug = settings.DEBUG%0A +with self. settings .DEB @@ -3041,23 +3041,27 @@ ings -. +( DEBUG - = += False -%0A +):%0A @@ -3092,32 +3092,36 @@ ytics()%0A + self.assertTrue( @@ -3147,32 +3147,36 @@ %3C!--'))%0A + self.assertTrue( @@ -3199,32 +3199,36 @@ '--%3E'))%0A + + self.assertNotIn @@ -3258,35 +3258,4 @@ ut)%0A - settings.DEBUG = debug%0A
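The rewrite above replaces hand-rolled save/restore of settings.DEBUG with Django's built-in override helper. The pattern in miniature:

# The context-manager pattern adopted above: SimpleTestCase.settings()
# overrides the named settings inside the with-block and restores the
# originals on exit, even if an assertion raises.
from django.test import TestCase

class PlaceholderTest(TestCase):
    def test_debug_placeholder(self):
        with self.settings(DEBUG=True):
            pass  # code under test sees DEBUG=True here
        # DEBUG is back to its configured value here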
b7471d6b1d209c8abe2c4920e5d86d8a542758b4
Remove unused import in StartSliceJob
plugins/CuraEngineBackend/StartSliceJob.py
plugins/CuraEngineBackend/StartSliceJob.py
# Copyright (c) 2015 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. import time import numpy from UM.Job import Job from UM.Application import Application from UM.Logger import Logger from UM.Scene.SceneNode import SceneNode from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from cura.OneAtATimeIterator import OneAtATimeIterator from . import Cura_pb2 ## Job class that handles sending the current scene data to CuraEngine class StartSliceJob(Job): def __init__(self, profile, socket): super().__init__() self._scene = Application.getInstance().getController().getScene() self._profile = profile self._socket = socket def run(self): self._scene.acquireLock() for node in DepthFirstIterator(self._scene.getRoot()): if node.callDecoration("getLayerData"): node.getParent().removeChild(node) break object_groups = [] if self._profile.getSettingValue("print_sequence") == "one_at_a_time": for node in OneAtATimeIterator(self._scene.getRoot()): temp_list = [] if getattr(node, "_outside_buildarea", False): continue children = node.getAllChildren() children.append(node) for child_node in children: if type(child_node) is SceneNode and child_node.getMeshData() and child_node.getMeshData().getVertices() is not None: temp_list.append(child_node) if temp_list: object_groups.append(temp_list) Job.yieldThread() else: temp_list = [] for node in DepthFirstIterator(self._scene.getRoot()): if type(node) is SceneNode and node.getMeshData() and node.getMeshData().getVertices() is not None: if not getattr(node, "_outside_buildarea", False): temp_list.append(node) Job.yieldThread() if temp_list: object_groups.append(temp_list) self._scene.releaseLock() if not object_groups: return self._sendSettings(self._profile) slice_message = Cura_pb2.Slice() for group in object_groups: group_message = slice_message.object_lists.add() for object in group: mesh_data = object.getMeshData().getTransformed(object.getWorldTransformation()) obj = group_message.objects.add() obj.id = id(object) verts = numpy.array(mesh_data.getVertices()) verts[:,[1,2]] = verts[:,[2,1]] verts[:,1] *= -1 obj.vertices = verts.tostring() self._handlePerObjectSettings(object, obj) Job.yieldThread() Logger.log("d", "Sending data to engine for slicing.") self._socket.sendMessage(slice_message) self.setResult(True) def _sendSettings(self, profile): msg = Cura_pb2.SettingList() for key, value in profile.getAllSettingValues(include_machine = True).items(): s = msg.settings.add() s.name = key s.value = str(value).encode("utf-8") self._socket.sendMessage(msg) def _handlePerObjectSettings(self, node, message): profile = node.callDecoration("getProfile") if profile: for key, value in profile.getAllSettingValues().items(): setting = message.settings.add() setting.name = key setting.value = str(value).encode() Job.yieldThread() object_settings = node.callDecoration("getAllSettingValues") if not object_settings: return for key, value in object_settings.items(): setting = message.settings.add() setting.name = key setting.value = str(value).encode() Job.yieldThread()
Python
0
@@ -94,20 +94,8 @@ r.%0A%0A -import time%0A impo
4ade02ef18b7f6746989f27a20c26b1d62e0c4a9
Update if block to use elif
canvasapi/requester.py
canvasapi/requester.py
from __future__ import absolute_import, division, print_function, unicode_literals from datetime import datetime import requests from canvasapi.exceptions import ( BadRequest, CanvasException, Forbidden, InvalidAccessToken, ResourceDoesNotExist, Unauthorized ) class Requester(object): """ Responsible for handling HTTP requests. """ def __init__(self, base_url, access_token): """ :param base_url: The base URL of the Canvas instance's API. :type base_url: str :param access_token: The API key to authenticate requests with. :type access_token: str """ self.base_url = base_url self.access_token = access_token self._session = requests.Session() self._cache = [] def request( self, method, endpoint=None, headers=None, use_auth=True, _url=None, _kwargs=None, **kwargs): """ Make a request to the Canvas API and return the response. :param method: The HTTP method for the request. :type method: str :param endpoint: The endpoint to call. :type endpoint: str :param headers: Optional HTTP headers to be sent with the request. :type headers: dict :param use_auth: Optional flag to remove the authentication header from the request. :type use_auth: bool :param _url: Optional argument to send a request to a URL outside of the Canvas API. If this is selected and an endpoint is provided, the endpoint will be ignored and only the _url argument will be used. :type _url: str :param _kwargs: A list of 2-tuples representing processed keyword arguments to be sent to Canvas as params or data. :type _kwargs: `list` :rtype: str """ full_url = _url if _url else "{}{}".format(self.base_url, endpoint) if not headers: headers = {} if use_auth: auth_header = {'Authorization': 'Bearer {}'.format(self.access_token)} headers.update(auth_header) # Convert kwargs into list of 2-tuples and combine with _kwargs. _kwargs = _kwargs or [] _kwargs.extend(kwargs.items()) # Do any final argument processing before sending to request method. for i, kwarg in enumerate(_kwargs): kw, arg = kwarg # Convert boolean objects to a lowercase string. if isinstance(arg, bool): _kwargs[i] = (kw, str(arg).lower()) # Convert any datetime objects into ISO 8601 formatted strings. if isinstance(arg, datetime): _kwargs[i] = (kw, arg.isoformat()) # Determine the appropriate request method. if method == 'GET': req_method = self._get_request elif method == 'POST': req_method = self._post_request elif method == 'DELETE': req_method = self._delete_request elif method == 'PUT': req_method = self._put_request # Call the request method response = req_method(full_url, headers, _kwargs) # Add response to internal cache if len(self._cache) > 4: self._cache.pop() self._cache.insert(0, response) # Raise for status codes if response.status_code == 400: raise BadRequest(response.text) elif response.status_code == 401: if 'WWW-Authenticate' in response.headers: raise InvalidAccessToken(response.json()) else: raise Unauthorized(response.json()) elif response.status_code == 403: raise Forbidden(response.text) elif response.status_code == 404: raise ResourceDoesNotExist('Not Found') elif response.status_code == 500: raise CanvasException("API encountered an error processing your request") return response def _get_request(self, url, headers, params=None): """ Issue a GET request to the specified endpoint with the data provided. :param url: str :pararm headers: dict :param params: dict """ return self._session.get(url, headers=headers, params=params) def _post_request(self, url, headers, data=None): """ Issue a POST request to the specified endpoint with the data provided. 
:param url: str :pararm headers: dict :param data: dict """ # Grab file from data. file = None for tup in data: if tup[0] == 'file': file = {'file': tup[1]} break # Remove file entry from data. data[:] = [tup for tup in data if tup[0] != 'file'] return self._session.post(url, headers=headers, data=data, files=file) def _delete_request(self, url, headers, data=None): """ Issue a DELETE request to the specified endpoint with the data provided. :param url: str :pararm headers: dict :param data: dict """ return self._session.delete(url, headers=headers, data=data) def _put_request(self, url, headers, data=None): """ Issue a PUT request to the specified endpoint with the data provided. :param url: str :pararm headers: dict :param data: dict """ return self._session.put(url, headers=headers, data=data)
Python
0
@@ -2644,32 +2644,34 @@ gs.%0A +el if isinstance(ar
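A two-character diff: the second isinstance branch becomes elif. Since an argument cannot be both a bool and a datetime, the branches are mutually exclusive and the elif simply skips the redundant second check once the first matches:

# Standalone illustration of the if/elif change above.
from datetime import datetime

def normalize(arg):
    if isinstance(arg, bool):
        return str(arg).lower()     # True -> 'true'
    elif isinstance(arg, datetime):
        return arg.isoformat()      # datetime -> ISO 8601 string
    return arg

print(normalize(True), normalize(datetime(2017, 1, 1)))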
a71eeb6961acdd4ccc0f888a47b8799755d56eaa
Make it fill the screen with boxes
cubix/game/gamemanager.py
cubix/game/gamemanager.py
from PIL import Image from cubix.core.pycompat import * from cubix.core import window from cubix.core import events from cubix.core import context from cubix.core import timing from cubix.core import files from cubix.core import shaders from cubix.core.opengl import gl from cubix.core.opengl import pgl from cubix.core import glmath from cubix.core import texture from cubix.core import mesh class GameManager(object): ''' Entry point into the game, and manages the game in general ''' def __init__(self): self.width = 800 self.height = 600 self.title = "Cubix" self.running = False window.init_video() self.fpsTimer = timing.FpsCounter() self.fpsEstimate = 0 self.events = events.Events() self.window = window.Window(self.title, self.width, self.height, False) self.context = context.Context(3, 3, 2) self.context.window = self.window self.events.add_listener(self.process_event) gl.init() major = pgl.glGetInteger(gl.GL_MAJOR_VERSION) minor = pgl.glGetInteger(gl.GL_MINOR_VERSION) print ('OpenGL Version: {}.{}'.format(major, minor)) gl.glViewport(0, 0, self.width, self.height) vsPath = files.resolve_path('data', 'shaders', 'main.vs') fsPath = files.resolve_path('data', 'shaders', 'main.fs') vertex = shaders.VertexShader(vsPath) fragment = shaders.FragmentShader(fsPath) self.program = shaders.ShaderProgram(vertex, fragment) self.program.use() self.program.new_uniform(b'ortho') # Load character image into new opengl texture imagePath = files.resolve_path('data', 'images', 'cubix.png') self.cubixTex = texture.Texture(imagePath, self.program) self.meshTest = mesh.Mesh(self.program, self.cubixTex) self.meshTest.scale(32) self.meshes = [] rowx = 0 rowy = 0 for i in range(int((self.width/37) * (self.height/37))): self.meshes.append(mesh.Mesh(self.program, self.cubixTex)) self.meshes[i].scale(32) if i % (self.width//32) == 24: rowx += 1 rowy = 0 self.meshes[i].translate(rowy*32 + rowy * 5, rowx * 37) rowy += 1 print (len(self.meshes)) self.ortho = glmath.ortho(0.0, self.width, self.height, 0.0, -1.0, 1.0) self.program.set_uniform(b'ortho', self.ortho) glerr = gl.glGetError() if glerr != 0: print ('GLError:', glerr) def resize(self, width, height): self.width = width self.height = height gl.glViewport(0, 0, self.width, self.height) self.ortho = glmath.ortho(0.0, self.width, self.height, 0.0, -1.0, 1.0) self.program.set_uniform(b'ortho', self.ortho) def process_event(self, event, data): if event == 'quit' or event == 'window_close': self.running = False elif event == 'window_resized': winID, x, y = data self.resize(x, y) elif event == 'mouse_move': x, y = data self.meshTest.translate(x, y) def update(self): self.meshTest.update() for i in range(len(self.meshes)): self.meshes[i].update() def render(self): gl.glClearColor(0.5, 0.5, 0.5, 1.0) gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) self.meshTest.render() for i in range(len(self.meshes)): self.meshes[i].render() def do_run(self): ''' Process a single loop ''' self.events.process() self.update() self.render() self.window.flip() self.fpsTimer.tick() if self.fpsTimer.fpsTime >= 2000: self.fpsEstimate = self.fpsTimer.get_fps() print ("{:.2f} fps".format(self.fpsEstimate)) def run(self): ''' Called from launcher doesnt exit until the game is quit ''' self.running = True while self.running: self.do_run()
Python
0.000004
@@ -1952,45 +1952,77 @@ -for i in range(int((self.width/37) * +numPerRow = int(self.width/37)%0A for i in range(numPerRow * int (sel @@ -2034,17 +2034,16 @@ ght/37)) -) :%0A @@ -2163,34 +2163,49 @@ if -i %25 (self.width//32) == 24 +(rowy*32) + (rowy * 5) + 32 %3E= self.width :%0A @@ -2615,16 +2615,633 @@ glerr)%0A%0A + def resize_obj(self):%0A rowx = 0%0A rowy = 0%0A numPerRow = int(self.width/37)%0A newMesh = %5B%5D%0A for i in range(numPerRow * int(self.height/37)):%0A if i %3E len(self.meshes) - 1:%0A newMesh.append(mesh.Mesh(self.program, self.cubixTex))%0A newMesh%5Bi%5D.scale(32)%0A else:%0A newMesh.append(self.meshes%5Bi%5D)%0A if (rowy*32) + (rowy * 5) + 32 %3E= self.width:%0A rowx += 1%0A rowy = 0%0A newMesh%5Bi%5D.translate(rowy*32 + rowy * 5, rowx * 37)%0A rowy += 1%0A self.meshes = newMesh%0A%0A def @@ -3454,24 +3454,24 @@ -1.0, 1.0)%0A - self @@ -3512,16 +3512,42 @@ f.ortho) +%0A self.resize_obj() %0A%0A de
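The diff reworks the box layout so rows wrap on actual pixel width rather than a fixed per-row count. The wrap rule in isolation, using the record's 32px boxes and 5px gaps:

# Hedged sketch of the wrapping rule introduced above: advance to the next
# row once the next 32px box (plus its accumulated 5px gaps) would run off
# the window width. The window width here is illustrative.
width, box, gap = 800, 32, 5
rowx = rowy = 0
positions = []
for _ in range(50):
    if rowy * box + rowy * gap + box >= width:
        rowx += 1
        rowy = 0
    positions.append((rowy * (box + gap), rowx * (box + gap)))
    rowy += 1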
9480d9afe7cc1052d9166e28dc07182765dd2a89
Raise JobError if can't import. Fixes #284
django_extensions/management/jobs.py
django_extensions/management/jobs.py
""" django_extensions.management.jobs """ import os from imp import find_module _jobs = None def noneimplementation(meth): return None class JobError(Exception): pass class BaseJob(object): help = "undefined job description." when = None def execute(self): raise NotImplementedError("Job needs to implement the execute method") class MinutelyJob(BaseJob): when = "minutely" class QuarterHourlyJob(BaseJob): when = "quarter_hourly" class HourlyJob(BaseJob): when = "hourly" class DailyJob(BaseJob): when = "daily" class WeeklyJob(BaseJob): when = "weekly" class MonthlyJob(BaseJob): when = "monthly" class YearlyJob(BaseJob): when = "yearly" def my_import(name): imp = __import__(name) mods = name.split('.') if len(mods) > 1: for mod in mods[1:]: imp = getattr(imp, mod) return imp def find_jobs(jobs_dir): try: return [f[:-3] for f in os.listdir(jobs_dir) if not f.startswith('_') and f.endswith(".py")] except OSError: return [] def find_job_module(app_name, when=None): parts = app_name.split('.') parts.append('jobs') if when: parts.append(when) parts.reverse() path = None while parts: part = parts.pop() f, path, descr = find_module(part, path and [path] or None) return path def import_job(app_name, name, when=None): jobmodule = "%s.jobs.%s%s" % (app_name, when and "%s." % when or "", name) job_mod = my_import(jobmodule) # todo: more friendly message for AttributeError if job_mod does not exist try: job = job_mod.Job except: raise JobError("Job module %s does not contain class instance named 'Job'" % jobmodule) if when and not (job.when == when or job.when is None): raise JobError("Job %s is not a %s job." % (jobmodule, when)) return job def get_jobs(when=None, only_scheduled=False): """ Returns a dictionary mapping of job names together with their respective application class. 
""" # FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py import sys try: cpath = os.path.dirname(os.path.realpath(sys.argv[0])) ppath = os.path.dirname(cpath) if ppath not in sys.path: sys.path.append(ppath) except: pass _jobs = {} if True: from django.conf import settings for app_name in settings.INSTALLED_APPS: scandirs = (None, 'minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly', 'yearly') if when: scandirs = None, when for subdir in scandirs: try: path = find_job_module(app_name, subdir) for name in find_jobs(path): if (app_name, name) in _jobs: raise JobError("Duplicate job %s" % name) job = import_job(app_name, name, subdir) if only_scheduled and job.when is None: # only include jobs which are scheduled continue if when and job.when != when: # generic job not in same schedule continue _jobs[(app_name, name)] = job except ImportError: # No job module -- continue scanning pass return _jobs def get_job(app_name, job_name): jobs = get_jobs() if app_name: return jobs[(app_name, job_name)] else: for a, j in jobs.keys(): if j == job_name: return jobs[(a, j)] raise KeyError("Job not found: %s" % job_name) def print_jobs(when=None, only_scheduled=False, show_when=True, show_appname=False, show_header=True): jobmap = get_jobs(when, only_scheduled=only_scheduled) print("Job List: %i jobs" % len(jobmap)) jlist = jobmap.keys() jlist.sort() appname_spacer = "%%-%is" % max(len(e[0]) for e in jlist) name_spacer = "%%-%is" % max(len(e[1]) for e in jlist) when_spacer = "%%-%is" % max(len(e.when) for e in jobmap.values() if e.when) if show_header: line = " " if show_appname: line += appname_spacer % "appname" + " - " line += name_spacer % "jobname" if show_when: line += " - " + when_spacer % "when" line += " - help" print(line) print("-" * 80) for app_name, job_name in jlist: job = jobmap[(app_name, job_name)] line = " " if show_appname: line += appname_spacer % app_name + " - " line += name_spacer % job_name if show_when: line += " - " + when_spacer % (job.when and job.when or "") line += " - " + job.help print(line)
Python
0
@@ -732,24 +732,37 @@ port(name):%0A + try:%0A imp = __ @@ -772,24 +772,124 @@ ort__(name)%0A + except ImportError, e:%0A raise JobError(%22Failed to import %25s with error %25s%22 %25 (name, e))%0A%0A mods = n
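The diff wraps the dynamic __import__ so a broken job module surfaces as the package's own JobError (the recorded code uses Python 2's "except ImportError, e:" spelling). The same idea in modern syntax:

# Sketch of the error wrapping added above; JobError mirrors the class
# already defined in the module.
class JobError(Exception):
    pass

def my_import(name):
    try:
        imp = __import__(name)
    except ImportError as e:
        raise JobError("Failed to import %s with error %s" % (name, e))
    for mod in name.split('.')[1:]:
        imp = getattr(imp, mod)
    return imp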
58a36b2f935f15a78edf6b6c158d91797ba105e9
Add inverse of shortflags test for sanity check
tests/cli.py
tests/cli.py
import os import sys import StringIO from spec import eq_, skip, Spec, ok_, trap from invoke.runner import run from invoke.parser import Parser, Context from invoke.collection import Collection from invoke.tasks import task from invoke.exceptions import Failure import invoke from _utils import support class CLI(Spec): "Command-line behavior" def setup(self): os.chdir(support) self.result = run("invoke -c integration print_foo", hide='both') # Yo dogfood, I heard you like invoking @trap def basic_invocation(self): eq_(self.result.stdout, "foo\n") @trap def implicit_task_module(self): # Contains tasks.py os.chdir('implicit') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") @trap def invocation_with_args(self): result = run("invoke -c integration print_name --name whatevs") eq_(result.stdout, "whatevs\n") @trap def shorthand_binary_name(self): eq_(self.result.stdout, "foo\n") @trap def version_info(self): eq_(run("invoke -V").stdout, "Invoke %s\n" % invoke.__version__) TB_SENTINEL = 'Traceback (most recent call last)' class HighLevelFailures(Spec): def command_failure(self): "Command failure doesn't show tracebacks" result = run("inv -c fail simple", warn=True, hide='both') assert TB_SENTINEL not in result.stderr assert result.exited != 0 class parsing: def should_not_show_tracebacks(self): result = run("inv -c fail missing_pos", warn=True, hide='both') assert TB_SENTINEL not in result.stderr def should_show_core_usage_on_core_failures(self): skip() def should_show_context_usage_on_context_failures(self): skip() def load_failure(self): skip() class CLIParsing(Spec): """ High level parsing tests """ def setup(self): @task(positional=[]) def mytask(mystring, s, boolean=False, b=False, v=False): pass @task def mytask2(): pass @task(positional=[]) def mytask3(mystring): pass c = Collection() c.add_task('mytask', mytask) c.add_task('mytask2', mytask2) c.add_task('mytask3', mytask3) self.c = c def _parser(self): return Parser(self.c.to_contexts()) def _parse(self, argstr): return self._parser().parse_argv(argstr.split()) def _compare(self, invoke, flagname, value): invoke = "mytask " + invoke result = self._parse(invoke) eq_(result.to_dict()['mytask'][flagname], value) def boolean_args(self): "mytask --boolean" self._compare("--boolean", 'boolean', True) def flag_then_space_then_value(self): "mytask --mystring foo" self._compare("--mystring foo", 'mystring', 'foo') def flag_then_equals_sign_then_value(self): "mytask --mystring=foo" self._compare("--mystring=foo", 'mystring', 'foo') def short_boolean_flag(self): "mytask -b" self._compare("-b", 'b', True) def short_flag_then_space_then_value(self): "mytask -s value" self._compare("-s value", 's', 'value') def short_flag_then_equals_sign_then_value(self): "mytask -s=value" self._compare("-s=value", 's', 'value') def short_flag_with_adjacent_value(self): "mytask -svalue" r = self._parse("mytask -svalue") eq_(r[0].args.s.value, 'value') def _flag_value_task(self, value): r = self._parse("mytask -s %s mytask2" % value) eq_(len(r), 2) eq_(r[0].name, 'mytask') eq_(r[0].args.s.value, value) eq_(r[1].name, 'mytask2') def flag_value_then_task(self): "mytask -s value mytask2" self._flag_value_task('value') def flag_value_same_as_task_name(self): "mytask -s mytask2 mytask2" self._flag_value_task('mytask2') def three_tasks_with_args(self): "mytask --boolean mytask3 --mystring foo mytask2" r = self._parse("mytask --boolean mytask3 --mystring foo mytask2") eq_(len(r), 3) eq_([x.name for x in r], ['mytask', 'mytask3', 'mytask2']) 
eq_(r[0].args.boolean.value, True) eq_(r[1].args.mystring.value, 'foo') def tasks_with_duplicately_named_kwargs(self): "mytask --mystring foo mytask3 --mystring bar" r = self._parse("mytask --mystring foo mytask3 --mystring bar") eq_(r[0].name, 'mytask') eq_(r[0].args.mystring.value, 'foo') eq_(r[1].name, 'mytask3') eq_(r[1].args.mystring.value, 'bar') def multiple_short_flags_adjacent(self): "mytask -bv" r = self._parse("mytask -bv") a = r[0].args eq_(a.b.value, True) eq_(a.v.value, True)
Python
0.000003
@@ -4803,18 +4803,72 @@ task -bv -%22%0A + (and inverse)%22%0A for args in ('-bv', '-vb'):%0A @@ -4895,22 +4895,31 @@ ask --bv%22)%0A +%25s%22 %25 args)%0A a = @@ -4914,16 +4914,17 @@ + a = r%5B0%5D @@ -4929,16 +4929,20 @@ 0%5D.args%0A + @@ -4954,32 +4954,36 @@ .b.value, True)%0A + eq_(a.v.
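The diff turns the single '-bv' assertion into a loop over both orderings. A self-contained analogue using argparse in place of invoke's parser, just to show the shape of the check:

# Adjacent single-character store_true flags should parse identically in
# either order; argparse here stands in for the project's own parser.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-b', action='store_true')
parser.add_argument('-v', action='store_true')
for args in ('-bv', '-vb'):
    ns = parser.parse_args([args])
    assert ns.b is True and ns.v is True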
e8793a837dc88ee283193042e28c393e89346fb2
Remove unused _suffix arg from CASBackend._validate_ticket()
arcutils/cas/backends.py
arcutils/cas/backends.py
import logging import textwrap from xml.etree import ElementTree from urllib.request import urlopen from django.conf import settings as django_settings from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend from django.contrib.auth.hashers import make_password from django.utils.module_loading import import_string from arcutils.exc import ARCUtilsDeprecationWarning from arcutils.decorators import cached_property from .settings import settings from .utils import make_cas_url, parse_cas_tree, tree_find log = logging.getLogger(__name__) class CASBackend: """CAS authentication backend.""" def authenticate(self, ticket, service): cas_data = self._validate_ticket(ticket, service) if self._response_callbacks: ARCUtilsDeprecationWarning.warn( 'The use of CAS response callbacks is deprecated. Subclass CASBackend or' 'CASModelBackend and override the get_or_create_user() method instead.') log.debug('Calling CAS response callbacks...') for callback in self._response_callbacks: callback(cas_data) return self.get_or_create_user(cas_data) if cas_data else None def get_or_create_user(self, cas_data, **overrides): """Get user. ``cas_data`` must contain a 'username' key. If the corresponding user already exists, it will be returned as is; if it doesn't, a new user record will be created and returned. .. note:: The ``CAS.auto_create_user`` setting can be set to ``False`` to disable the auto-creation of users. ``overrides`` are passed through to :meth:`create_user`. """ user_model = get_user_model() username = cas_data['username'] try: return user_model.objects.get(username=username) except user_model.DoesNotExist: pass return self.create_user(cas_data, **overrides) if settings.get('auto_create_user') else None def create_user(self, cas_data, **overrides): """Create user from CAS data. This attempts to populate some user attributes from the CAS response: ``first_name``, ``last_name``, and ``email``. Any of those attributes that aren't found in the CAS response will be set to a default value on the new user object. The user's password is set to something unusable in the app's user table--i.e., we don't store passwords for CAS users in the app. If the ``STAFF`` setting is set, the user's ``is_staff`` flag will be set according to whether their username is in the ``STAFF`` list. If the ``SUPERUSER`` settings is set, the user's ``is_staff`` and ``is_superuser`` flags will be set according to whether their username is in the ``SUPERUSERS`` list. ``overrides`` can be passed as to override the values of *any* of the fields mentioned above or to set additional fields. This is useful in subclasses to avoid re-saving the user. .. note:: This method assumes a standard user model. It may not or may not work with custom user models. 
""" user_model = get_user_model() username = cas_data['username'] user_args = { 'email': '{username}@pdx.edu'.format(username=username), 'first_name': '', 'last_name': '', } for name, value in user_args.items(): if name in cas_data: user_args[name] = cas_data[name] staff = getattr(django_settings, 'STAFF', None) superusers = getattr(django_settings, 'SUPERUSERS', None) is_staff = bool(staff and username in staff) is_superuser = bool(superusers and username in superusers) user_args.update({ 'username': username, 'password': make_password(None), 'is_staff': is_staff or is_superuser, 'is_superuser': is_superuser, }) user_args.update(overrides) return user_model.objects.create(**user_args) def get_user(self, user_id): user_model = get_user_model() try: return user_model._default_manager.get(pk=user_id) except user_model.DoesNotExist: return None def _validate_ticket(self, ticket, service, suffix=None): path = settings.get('validate_path') params = {'ticket': ticket, 'service': service} url = make_cas_url(path, **params) log.debug('Validating CAS ticket: {url}'.format(url=url)) with urlopen(url) as fp: response = fp.read() tree = ElementTree.fromstring(response) log.debug('CAS response:\n%s', ElementTree.tostring(tree, encoding='unicode')) if tree is None: raise ValueError('Unexpected CAS response:\n{response}'.format(response=response)) success = tree_find(tree, 'cas:authenticationSuccess') if success: log.debug('CAS ticket validated: {url}'.format(url=url)) return parse_cas_tree(tree) else: message = 'CAS ticket not validated: {url}\n{detail}' failure = tree_find(tree, 'cas:authenticationFailure') if failure: detail = failure.text.strip() else: detail = ElementTree.tostring(tree, encoding='unicode').strip() detail = textwrap.indent(detail, ' ' * 4) log.error(message.format(url=url, detail=detail)) return None # Explicit @cached_property def _response_callbacks(self): callbacks = settings.get('response_callbacks', []) for i, cb in enumerate(callbacks): if isinstance(cb, str): callbacks[i] = import_string(cb) return callbacks class CASModelBackend(CASBackend, ModelBackend): """CAS/Model authentication backend. Use CASBackend's authenticate() method while also getting all the default permissions handling from ModelBackend. """
Python
0.000002
@@ -4417,21 +4417,8 @@ vice -, suffix=None ):%0A
a2849e7d016c812317fc503dc15f8f3dfec7da0a
use apply_async instead of delay
mrbelvedereci/build/handlers.py
mrbelvedereci/build/handlers.py
from django.db.models.signals import post_save from django.dispatch import receiver from mrbelvedereci.build.models import Build from mrbelvedereci.build.tasks import run_build @receiver(post_save, sender=Build) def create_repo_webhooks(sender, **kwargs): build = kwargs['instance'] created = kwargs['created'] if not created: return # Queue the background job with a 1 second delay to allow the transaction to commit run_build.delay(build.id, countdown=1)
Python
0.000001
@@ -451,21 +451,28 @@ n_build. -delay +apply_async( (build.i @@ -472,16 +472,17 @@ build.id +) , countd
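The reason for the swap above: with .delay(), every keyword is forwarded to the task function itself, so countdown=1 arrives as a task kwarg instead of delaying anything, while .apply_async() treats countdown as an execution option. The canonical Celery forms side by side, as an illustrative fragment (run_build stands for the module's task; note that apply_async expects positional args as a tuple, whereas the recorded diff produces (build.id) without a trailing comma, which Python reads as a plain value rather than a one-tuple):

run_build.delay(build.id, countdown=1)           # countdown passed INTO the task
run_build.apply_async((build.id,), countdown=1)  # task runs about 1 second later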
e84b4a5c5b148423d450fe5e3d01310d92652841
rename dashboard url name to crm_dashboard
crm/urls.py
crm/urls.py
# -*- coding: utf-8 -*- # ---------------------------------------------------------------------------- # $Id: urls.py 425 2009-07-14 03:43:01Z tobias $ # ---------------------------------------------------------------------------- # # Copyright (C) 2008-2009 Caktus Consulting Group, LLC # # This file is part of django-crm and was originally extracted from minibooks. # # django-crm is published under a BSD-style license. # # You should have received a copy of the BSD License along with django-crm. # If not, see <http://www.opensource.org/licenses/bsd-license.php>. # from django.conf.urls.defaults import * from django.contrib.auth import views as auth_views import crm.views as views urlpatterns = patterns('', url(r'^dashboard/$', views.dashboard, name='dashboard'), url(r'^search/$', views.quick_search, name='quick_search'), url(r'^interaction/$', views.list_interactions, name='list_interactions'), url(r'^(?:person/(?P<person_id>\d+)/)?interaction/create/$', views.create_edit_interaction, name='create_interaction'), url(r'^interaction/(?P<interaction_id>\d+)/edit/$', views.create_edit_interaction, name='edit_interaction'), url(r'^interaction/(?P<interaction_id>\d+)/remove/$', views.remove_interaction, name='remove_interaction'), # TODO make these use flot (or something else) and re-enable # url(r'^hours/$', views.hours, name='hours'), # url(r'^graph/sum$',views.graph, kwargs={'type': 'developer_hours_sum', 'developer' : None }, name='graph_developer_hours_sum'), # url(r'^graph/commit_hours/(?P<developer>\w+)$', views.graph, kwargs={'type': 'commit_hours'}, name='graph_commit_hours'), # url(r'^graph/account/(?P<developer>\d+)$',views.graph, kwargs={'type': 'account'}, name='graph_account'), url(r'^person/search/$', views.quick_add_person, name='quick_add_person'), url(r'^person/list/$', views.list_people, name='list_people'), url(r'^person/create/$', views.create_edit_person, name='create_person'), url(r'^person/register/$', views.register_person, name='register_person'), url(r'^person/(?P<person_id>\d+)/$', views.view_person, name='view_person'), url(r'^person/(?P<person_id>\d+)/edit/$', views.create_edit_person, name='edit_person'), # businesses url(r'^business/list/$', views.list_businesses, name='list_businesses'), url( r'^business/(?P<business_id>\d+)/$', views.view_business, name='view_business', ), url( r'^business/create/$', views.create_edit_business, name='create_business', ), url( r'^business/(?P<business_id>\d+)/edit/$', views.create_edit_business, name='edit_business', ), # projects url(r'^project/list/$', views.list_projects, name='list_projects'), url( r'^business/(?P<business_id>\d+)/project/(?P<project_id>\d+)/$', views.view_project, name='view_project', ), url( r'^(?:business/(?P<business_id>\d+)/)?project/create/$', views.create_edit_project, name='create_project', ), url( r'^business/(?P<business_id>\d+)/project/(?P<project_id>\d+)/edit/$', views.create_edit_project, name='edit_project', ), url( r'^business/(?P<business_id>\d+)/(?:project/(?P<project_id>\d+)/)?(?:contact/(?P<user_id>\w+)/)?(?P<action>remove|add)/$', views.associate_contact, name='associate_contact', ), url( r'^business/(?P<business_id>\d+)/project/(?P<project_id>\d+)/contact/(?P<user_id>\w+)/edit/$', views.edit_project_relationship, name='edit_project_relationship', ), url(r'^book/(?P<file_name>[\w.]+)$', views.address_book, name='address_book'), )
Python
0.000025
@@ -787,16 +787,20 @@ , name=' +crm_ dashboar
1bb90728d9ef6c08452d2094e9930b6aa916389e
Remove use of girder.events in queue adapter
cumulus/queue/__init__.py
cumulus/queue/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- ############################################################################### # Copyright 2015 Kitware Inc. # # Licensed under the Apache License, Version 2.0 ( the "License" ); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################### from jsonpath_rw import parse from girder import events from . import sge from . import pbs from . import slurm from . import newt from cumulus.constants import QueueType from cumulus.constants import ClusterType type_to_adapter = { QueueType.SGE: sge.SgeQueueAdapter, QueueType.PBS: pbs.PbsQueueAdapter, QueueType.SLURM: slurm.SlurmQueueAdapter, QueueType.NEWT: newt.NewtQueueAdapter } def get_queue_adapter(cluster, cluster_connection=None): global type_to_adapter # Special case for nersc clusters. They use SLURM ( at the moment ) but the # submission is done using the NEWT REST API. So the scheduler is set the # SLURM but we want to use the NEWT adapter. if cluster['type'] == ClusterType.NEWT: system = QueueType.NEWT else: system = parse('config.scheduler.type').find(cluster) if system: system = system[0].value # Default to SGE else: system = QueueType.SGE if system not in type_to_adapter: e = events.trigger('queue.adapter.get', system) if len(e.responses) > 0: cls = e.responses[-1] else: raise Exception('Unsupported queuing system: %s' % system) else: cls = type_to_adapter[system] return cls(cluster, cluster_connection) def is_valid_type(type): """ Return True if type is a valid (supported) queueing system, False otherwise. :param The queue type ( 'sge', 'slurm' ...) :returns """ valid = False if type in type_to_adapter: valid = True else: # See if this type is supported by a plugin e = events.trigger('queue.adapter.get', type) if len(e.responses) > 0: valid = True return valid
Python
0
@@ -822,35 +822,8 @@ se%0A%0A -from girder import events%0A%0A from @@ -1780,145 +1780,8 @@ er:%0A - e = events.trigger('queue.adapter.get', system)%0A if len(e.responses) %3E 0:%0A cls = e.responses%5B-1%5D%0A else:%0A @@ -1847,16 +1847,16 @@ system)%0A + else @@ -2138,28 +2138,14 @@ -valid = False%0A if +return typ @@ -2168,219 +2168,5 @@ pter -:%0A valid = True%0A else:%0A # See if this type is supported by a plugin%0A e = events.trigger('queue.adapter.get', type)%0A if len(e.responses) %3E 0:%0A valid = True%0A%0A return valid %0A
45b13b0cd235e10dc69f5fa2a9e65e704cebb043
fix silly bugs
crawler/crawler.py
crawler/crawler.py
from __future__ import absolute_import import re import bs4 import traceback import progressbar import itertools from requests_futures.sessions import FuturesSession from django.db import transaction from crawler.course import ( curriculum_to_trs, course_from_tr, syllabus_url, course_from_syllabus ) from data_center.models import Course, Department from data_center.const import week_dict, course_dict url = 'https://www.ccxp.nthu.edu.tw/ccxp/INQUIRE/JH/6/6.2/6.2.9/JH629002.php' dept_url = 'https://www.ccxp.nthu.edu.tw/ccxp/INQUIRE/JH/6/6.2/6.2.3/JH623002.php' # noqa YS = '104|10' cond = 'a' T_YEAR = 104 C_TERM = 10 MAX_WORKERS = 8 # max_workers for FuturesSession def dept_2_future(session, dept, ACIXSTORE, auth_num): return session.post( dept_url, data={ 'SEL_FUNC': 'DEP', 'ACIXSTORE': ACIXSTORE, 'T_YEAR': T_YEAR, 'C_TERM': C_TERM, 'DEPT': dept, 'auth_num': auth_num}) def cou_code_2_future(session, cou_code, ACIXSTORE, auth_num): return session.post( url, data={ 'ACIXSTORE': ACIXSTORE, 'YS': YS, 'cond': cond, 'cou_code': cou_code, 'auth_num': auth_num}) def save_syllabus(html, course): try: course_dict = course_from_syllabus(html) course.chi_title = course_dict['name_zh'] course.eng_title = course_dict['name_en'] course.teacher = course_dict['teacher'] course.room = course_dict['room'] course.syllabus = course_dict['syllabus'] course.save() except: print traceback.format_exc() print course return 'QAQ, what can I do?' def collect_class_info(tr, cou_code): course_dict = course_from_tr(tr) course, create = Course.objects.get_or_create(no=course_dict['no']) if cou_code not in course.code: course.code = '%s %s' % (course.code, cou_code) course.credit = course_dict['credit'] course.time = course_dict['time'] course.time_token = get_token(course_dict['time']) course.limit = course_dict['size_limit'] course.note = course_dict['note'] course.objective = course_dict['object'] course.prerequisite = course_dict['prerequisite'] course.ge = course_dict['ge_hint'] or '' course.save() return create def handle_curriculum_html(html, cou_code): cou_code_stripped = cou_code.strip() for tr in curriculum_to_trs(html): collect_class_info(tr, cou_code_stripped) def crawl_course(ACIXSTORE, auth_num, cou_codes): with FuturesSession(max_workers=MAX_WORKERS) as session: curriculum_futures = [ cou_code_2_future(session, cou_code, ACIXSTORE, auth_num) for cou_code in cou_codes ] progress = progressbar.ProgressBar() for future, cou_code in progress( itertools.izip(curriculum_futures, cou_codes) ): response = future.result() response.encoding = 'cp950' handle_curriculum_html(response.text, cou_code) print 'Crawling syllabus...' 
course_list = list(Course.objects.all()) with FuturesSession(max_workers=MAX_WORKERS) as session: course_futures = [ session.get( syllabus_url, params={ 'c_key': course.no, 'ACIXSTORE': ACIXSTORE, } ) for course in course_list ] progress = progressbar.ProgressBar(maxval=len(course_list)) with transaction.atomic(): for future, course in progress( course_futures, course_list) ): response = future.result() response.encoding = 'cp950' save_syllabus(response.text, course) print 'Total course information: %d' % Course.objects.count() def handle_dept_html(html): soup = bs4.BeautifulSoup(html, 'html.parser') divs = soup.find_all('div', class_='newpage') for div in divs: # Get something like ``EE 103BA`` dept_name = div.find_all('font')[0].get_text().strip() dept_name = dept_name.replace('B A', 'BA') dept_name = dept_name.replace('B B', 'BB') try: dept_name = re.search('\((.*?)\)', dept_name).group(1) except: # For all student (Not important for that dept.) continue trs = div.find_all('tr', bgcolor="#D8DAEB") department = Department.objects.get_or_create( dept_name=dept_name)[0] for tr in trs: tds = tr.find_all('td') cou_no = tds[0].get_text() try: course = Course.objects.get(no__contains=cou_no) department.required_course.add(course) except: print cou_no, 'gg' department.save() def crawl_dept(ACIXSTORE, auth_num, dept_codes): with FuturesSession(max_workers=MAX_WORKERS) as session: future_depts = [ dept_2_future(session, dept_code, ACIXSTORE, auth_num) for dept_code in dept_codes ] progress = progressbar.ProgressBar() with transaction.atomic(): for future in progress(future_depts): response = future.result() response.encoding = 'cp950' handle_dept_html(response.text) print 'Total department information: %d' % Department.objects.count() def get_token(s): try: return week_dict[s[0]] + course_dict[s[1]] + s[2:] except: return ''
Python
0.000102
@@ -2827,24 +2827,45 @@ ProgressBar( +maxval=len(cou_codes) )%0A for fu @@ -3631,32 +3631,55 @@ rse in progress( +itertools.izip_longest( %0A @@ -3706,17 +3706,16 @@ rse_list -) %0A @@ -3715,24 +3715,25 @@ +) ):%0A
e8ea3fb530b26adbcf3fa2ba602e3e96a91c5736
Create a method to render duration in "H:m:s" format.
base/components/social/youtube/models.py
base/components/social/youtube/models.py
# -*- coding: utf-8 -*- import datetime import dateutil from django.db import models from django.utils.encoding import smart_unicode from components.people.models import Group, Idol from .api import Api class Channel(models.Model): username = models.CharField(max_length=60) # Optional relationships. idol = models.OneToOneField(Idol, blank=True, null=True, related_name='%(class)s') group = models.OneToOneField(Group, blank=True, null=True, related_name='%(class)s') def __unicode__(self): return u'%s' % (self.username) def save(self, *args, **kwargs): super(Channel, self).save(*args, **kwargs) from .tasks import fetch_all_videos fetch_all_videos.delay(self.pk) def entries(self): api = Api() return api.fetch_all_videos_by_username(self.username) def latest_entries(self): api = Api() return api.fetch_latest_videos_by_username(self.username) class Video(models.Model): ytid = models.CharField('YouTube ID', max_length=200, primary_key=True, unique=True) channel = models.ForeignKey(Channel, related_name='videos') # Metadata. title = models.CharField(blank=True, max_length=200) description = models.TextField(blank=True) published = models.DateTimeField(blank=True, null=True) duration = models.IntegerField(blank=True, null=True) flash_url = models.URLField('flash URL', blank=True) watch_url = models.URLField('watch URL', blank=True) def __unicode__(self): return u'%s' % (self.title) def get_absolute_url(self): return self.watch_url def save(self, *args, **kwargs): # Connect to API and get the details. entry = self.entry() # Set the details. self.title = smart_unicode(entry.media.title.text) self.description = entry.media.description.text self.published = dateutil.parser.parse(entry.published.text) self.duration = entry.media.duration.seconds self.flash_url = entry.GetSwfUrl() self.watch_url = entry.media.player.url super(Video, self).save(*args, **kwargs) # Save the thumbnails. for thumbnail in entry.media.thumbnail: t = Thumbnail.objects.get_or_create(video=self, url=thumbnail.url) t.save() def entry(self): api = Api() return api.fetch_video(self.ytid) class Thumbnail(models.Model): video = models.ForeignKey(Video, null=True, related_name='thumbnails') url = models.URLField('URL') def __unicode__(self): return u'%s (for %s)' % (self.url, self.video.title) def get_absolute_url(self): return self.url
Python
0
@@ -2306,24 +2306,119 @@ t.save()%0A%0A + def duration_display(self):%0A return str(datetime.timedelta(seconds=self.duration))%0A%0A def entr
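The added duration_display() helper leans on timedelta's string form, which renders as H:MM:SS (with a days prefix only when needed):

# Quick check of the formatting trick used by duration_display() above.
import datetime

print(str(datetime.timedelta(seconds=754)))    # 0:12:34
print(str(datetime.timedelta(seconds=3661)))   # 1:01:01
print(str(datetime.timedelta(seconds=90061)))  # 1 day, 1:01:01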
580bdccc23b319a8becda0fd312d586b3c5a68d1
Update `salt.modules.config` to use `salt.utils.fopen` instead of `open`
salt/modules/config.py
salt/modules/config.py
''' Return config information ''' # Import python libs import re import os import urllib # Import salt libs import salt.utils # Set up the default values for all systems DEFAULTS = {'mongo.db': 'salt', 'mongo.host': 'salt', 'mongo.password': '', 'mongo.port': 27017, 'mongo.user': '', 'redis.db': '0', 'redis.host': 'salt', 'redis.port': 6379, 'test.foo': 'unconfigured', 'ca.cert_base_path': '/etc/pki', 'solr.cores': [], 'solr.host': 'localhost', 'solr.port': '8983', 'solr.baseurl': '/solr', 'solr.type': 'master', 'solr.request_timeout': None, 'solr.init_script': '/etc/rc.d/solr', 'solr.dih.import_options': {'clean': False, 'optimize': True, 'commit': True, 'verbose': False}, 'solr.backup_path': None, 'solr.num_backups': 1, 'poudriere.config': '/usr/local/etc/poudriere.conf', 'poudriere.config_dir': '/usr/local/etc/poudriere.d', 'ldap.server': 'localhost', 'ldap.port': '389', 'ldap.tls': False, 'ldap.scope': 2, 'ldap.attrs': None, 'ldap.binddn': '', 'ldap.bindpw': '', 'hosts.file': '/etc/hosts', 'aliases.file': '/etc/aliases', 'virt.images': '/srv/salt-images', 'virt.tunnel': False, } def backup_mode(backup=''): ''' Return the backup mode CLI Example:: salt '*' config.backup_mode ''' if backup: return backup return option('backup_mode') def manage_mode(mode): ''' Return a mode value, normalized to a string CLI Example:: salt '*' config.manage_mode ''' if mode is None: return None return str(mode).lstrip('0').zfill(3) def valid_fileproto(uri): ''' Returns a boolean value based on whether or not the URI passed has a valid remote file protocol designation CLI Example:: salt '*' config.valid_fileproto salt://path/to/file ''' try: return bool(re.match('^(?:salt|https?|ftp)://', uri)) except Exception: return False def option( value, default='', omit_opts=False, omit_master=False, omit_pillar=False): ''' Pass in a generic option and receive the value that will be assigned CLI Example:: salt '*' config.option redis.host ''' if not omit_opts: if value in __opts__: return __opts__[value] if not omit_master: if value in __pillar__.get('master', {}): return __pillar__['master'][value] if not omit_pillar: if value in __pillar__: return __pillar__[value] if value in DEFAULTS: return DEFAULTS[value] return default def get(key, default=''): ''' .. versionadded: 0.14 Attempt to retrieve the named value from opts, pillar, grains of the master config, if the named value is not available return the passed default. The default return is an empty string. The value can also represent a value in a nested dict using a ":" delimiter for the dict. 
This means that if a dict looks like this: {'pkg': {'apache': 'httpd'}} To retrieve the value associated with the apache key in the pkg dict this key can be passed: pkg:apache This routine traverses these data stores in this order: Local minion config (opts) Minion's grains Minion's pillar Master config CLI Example:: salt '*' config.get pkg:apache ''' ret = salt.utils.traverse_dict(__opts__, key, '_|-') if ret != '_|-': return ret ret = salt.utils.traverse_dict(__grains__, key, '_|-') if ret != '_|-': return ret ret = salt.utils.traverse_dict(__pillar__, key, '_|-') if ret != '_|-': return ret ret = salt.utils.traverse_dict(__pillar__.get('master', {}), key, '_|-') if ret != '_|-': return ret return default def dot_vals(value): ''' Pass in a configuration value that should be preceded by the module name and a dot, this will return a list of all read key/value pairs CLI Example:: salt '*' config.dot_vals host ''' ret = {} for key, val in __pillar__.get('master', {}).items(): if key.startswith('{0}.'.format(value)): ret[key] = val for key, val in __opts__.items(): if key.startswith('{0}.'.format(value)): ret[key] = val return ret def gather_bootstrap_script(replace=False): ''' Download the salt-bootstrap script, set replace to True to refresh the script if it has already been downloaded CLI Example:: salt '*' qemu.gather_bootstrap_script True ''' fn_ = os.path.join(__opts__['cachedir'], 'bootstrap.sh') if not replace and os.path.isfile(fn_): return fn_ with open(fn_, 'w+') as fp_: fp_.write(urllib.urlopen('http://bootstrap.saltstack.org').read()) return fn_
Python
0
@@ -5093,16 +5093,28 @@ with +salt.utils.f open(fn_
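The substitution above in context, as a sketch. The rationale is an assumption on my part: salt.utils.fopen of that era was a thin wrapper over open() that also marked the descriptor close-on-exec so spawned processes would not inherit it.

# The rewritten line from gather_bootstrap_script; fn_ is the cached
# bootstrap.sh path from the surrounding function (Python 2 urllib,
# matching the file).
import urllib

import salt.utils

with salt.utils.fopen(fn_, 'w+') as fp_:
    fp_.write(urllib.urlopen('http://bootstrap.saltstack.org').read())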
39cedb77312faa5afe46409d996b8dec587e6cf7
use simpler gridder
scripts/iemre/grid_p01d_12z_pre1997.py
scripts/iemre/grid_p01d_12z_pre1997.py
"""Make a gridded analysis of p01d_12z based on obs.""" import sys import subprocess import datetime import numpy as np import verde as vd import pyproj from metpy.units import units as mpunits from metpy.units import masked_array from pandas.io.sql import read_sql from pyiem.iemre import get_grids, XAXIS, YAXIS, set_grids from pyiem.util import get_dbconn def generic_gridder(day, df, idx): """ Generic gridding algorithm for easy variables """ data = df[idx].values coordinates = (df["lon"].values, df["lat"].values) region = [XAXIS[0], XAXIS[-1], YAXIS[0], YAXIS[-1]] projection = pyproj.Proj(proj="merc", lat_ts=df["lat"].mean()) spacing = 0.5 chain = vd.Chain( [ ("mean", vd.BlockReduce(np.mean, spacing=spacing * 111e3)), ("spline", vd.Spline(damping=1e-10, mindist=100e3)), ] ) train, test = vd.train_test_split( projection(*coordinates), data, random_state=0 ) chain.fit(*train) score = chain.score(*test) shape = (len(YAXIS), len(XAXIS)) grid = chain.grid( region=region, shape=shape, projection=projection, dims=["latitude", "longitude"], data_names=["precip"], ) res = grid.to_array() res = np.ma.where(res < 0, 0, res) print( ("%s %s rows for %s column min:%.3f max:%.3f score: %.3f") % (day, len(df.index), idx, np.nanmin(res), np.nanmax(res), score) ) return masked_array(res, mpunits("inch")) def main(argv): """Do work please""" day = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) pgconn = get_dbconn("coop") df = read_sql( """ SELECT case when a.precip < 0.005 then 0 else a.precip end as precip, st_x(t.geom) as lon, st_y(t.geom) as lat from alldata a JOIN stations t ON (a.station = t.id) WHERE a.day = %s and t.network ~* 'CLIMATE' and substr(a.station,3,4) != '0000' and substr(station,3,1) not in ('C', 'T') and precip >= 0 and precip < 50 """, pgconn, params=(day,), ) res = generic_gridder(day, df, "precip") if res is not None: ds = get_grids(day, varnames="p01d_12z") ds["p01d_12z"].values = res.to(mpunits("mm")).magnitude[0, :, :] set_grids(day, ds) subprocess.call( "python db_to_netcdf.py %s" % (day.strftime("%Y %m %d"),), shell=True, ) if __name__ == "__main__": main(sys.argv)
Python
0.000001
@@ -813,42 +813,36 @@ vd.S -pline(damping=1e-10, mindist=100e3 +cipyGridder(method=%22nearest%22 )),%0A
f9fb155ab3ffb8831f90526faf0397c5d20fdf46
Use internal_prod
cupy/manipulation/dims.py
cupy/manipulation/dims.py
import six import cupy from cupy import core zip_longest = six.moves.zip_longest six_zip = six.moves.zip def atleast_1d(*arys): """Converts arrays to arrays with dimensions >= 1. Args: arys (tuple of arrays): Arrays to be converted. All arguments must be cupy.ndarray objects. Only zero-dimensional array is affected. Returns: If there are only one input, then it returns its converted version. Otherwise, it returns a list of converted arrays. .. seealso:: :func:`numpy.atleast_1d` """ res = [] for a in arys: if not isinstance(a, cupy.ndarray): raise TypeError('Only cupy arrays can be atleast_1d') if a.ndim == 0: a = a.reshape(1) res.append(a) if len(res) == 1: res = res[0] return res def atleast_2d(*arys): """Converts arrays to arrays with dimensions >= 2. If an input array has dimensions less than two, then this function inserts new axes at the head of dimensions to make it have two dimensions. Args: arys (tuple of arrays): Arrays to be converted. All arguments must be cupy.ndarray objects. Returns: If there are only one input, then it returns its converted version. Otherwise, it returns a list of converted arrays. .. seealso:: :func:`numpy.atleast_2d` """ res = [] for a in arys: if not isinstance(a, cupy.ndarray): raise TypeError('Only cupy arrays can be atleast_2d') if a.ndim == 0: a = a.reshape(1, 1) elif a.ndim == 1: a = a[cupy.newaxis, :] res.append(a) if len(res) == 1: res = res[0] return res def atleast_3d(*arys): """Converts arrays to arrays with dimensions >= 3. If an input array has dimensions less than three, then this function inserts new axes to make it have three dimensions. The place of the new axes are following: - If its shape is ``()``, then the shape of output is ``(1, 1, 1)``. - If its shape is ``(N,)``, then the shape of output is ``(1, N, 1)``. - If its shape is ``(M, N)``, then the shape of output is ``(M, N, 1)``. - Otherwise, the output is the input array itself. Args: arys (tuple of arrays): Arrays to be converted. All arguments must be cupy.ndarray objects. Returns: If there are only one input, then it returns its converted version. Otherwise, it returns a list of converted arrays. .. seealso:: :func:`numpy.atleast_3d` """ res = [] for a in arys: if not isinstance(a, cupy.ndarray): raise TypeError('Only cupy arrays can be atleast_3d') if a.ndim == 0: a = a.reshape(1, 1, 1) elif a.ndim == 1: a = a[cupy.newaxis, :, cupy.newaxis] elif a.ndim == 2: a = a[:, :, cupy.newaxis] res.append(a) if len(res) == 1: res = res[0] return res broadcast = core.broadcast def broadcast_arrays(*args): """Broadcasts given arrays. Args: args (tuple of arrays): Arrays to broadcast for each other. Returns: list: A list of broadcasted arrays. .. seealso:: :func:`numpy.broadcast_arrays` """ return broadcast(*args).values def broadcast_to(array, shape, subok=False): rev = slice(None, None, -1) shape_arr = array._shape[rev] r_shape = [max(ss) for ss in zip_longest(shape_arr, shape[rev], fillvalue=0)] r_strides = [ a_st if sh == a_sh else (0 if a_sh == 1 else None) for sh, a_sh, a_st in six_zip(r_shape, array._shape[rev], array._strides[rev])] if None in r_strides: raise ValueError('Broadcasting failed') offset = (0,) * (len(r_shape) - len(r_strides)) view = array.view() view._shape = shape view._strides = offset + tuple(r_strides[rev]) view._size = internal.prod(r_shape) view._c_contiguous = -1 view._f_contiguous = -1 return view def expand_dims(a, axis): """Expands given arrays. Args: a (cupy.ndarray): Array to be expanded. axis (int): Position where new axis is to be inserted. 
Returns: cupy.ndarray: The number of dimensions is one greater than that of the input array. .. seealso:: :func:`numpy.expand_dims` """ # TODO(okuta): check type shape = a.shape if axis < 0: axis = axis + len(shape) + 1 return a.reshape(shape[:axis] + (1,) + shape[axis:]) def squeeze(a, axis=None): """Removes size-one axes from the shape of an array. Args: a (cupy.ndarray): Array to be reshaped. axis (int or tuple of ints): Axes to be removed. This function removes all size-one axes by default. If one of the specified axes is not of size one, an exception is raised. Returns: cupy.ndarray: An array without (specified) size-one axes. .. seealso:: :func:`numpy.squeeze` """ # TODO(okuta): check type return a.squeeze(axis)
Python
0.000001
@@ -3945,16 +3945,21 @@ e = +core. internal .pro @@ -3954,17 +3954,17 @@ internal -. +_ prod(r_s
707c4c801a0c35a1503575a6bd8c82fed6c589b6
Update tv example to use data module. Rewrap some text.
doc/examples/plot_lena_tv_denoise.py
doc/examples/plot_lena_tv_denoise.py
""" ==================================================== Denoising the picture of Lena using total variation ==================================================== In this example, we denoise a noisy version of the picture of Lena using the total variation denoising filter. The result of this filter is an image that has a minimal total variation norm, while being as close to the initial image as possible. The total variation is the L1 norm of the gradient of the image, and minimizing the total variation typically produces "posterized" images with flat domains separated by sharp edges. It is possible to change the degree of posterization by controlling the tradeoff between denoising and faithfulness to the original image. """ import numpy as np import scipy from scipy import ndimage import matplotlib.pyplot as plt from scikits.image.filter import tv_denoise l = scipy.misc.lena() l = l[230:290, 220:320] noisy = l + 0.4*l.std()*np.random.random(l.shape) tv_denoised = tv_denoise(noisy, weight=10) plt.figure(figsize=(12,2.8)) plt.subplot(131) plt.imshow(noisy, cmap=plt.cm.gray, vmin=40, vmax=220) plt.axis('off') plt.title('noisy', fontsize=20) plt.subplot(132) plt.imshow(tv_denoised, cmap=plt.cm.gray, vmin=40, vmax=220) plt.axis('off') plt.title('TV denoising', fontsize=20) tv_denoised = tv_denoise(noisy, weight=50) plt.subplot(133) plt.imshow(tv_denoised, cmap=plt.cm.gray, vmin=40, vmax=220) plt.axis('off') plt.title('(more) TV denoising', fontsize=20) plt.subplots_adjust(wspace=0.02, hspace=0.02, top=0.9, bottom=0, left=0, right=1) plt.show()
Python
0
@@ -218,25 +218,25 @@ ture of Lena - +%0A using the%0Ato @@ -232,17 +232,17 @@ sing the -%0A + total va @@ -292,17 +292,17 @@ s filter - +%0A is an im @@ -309,17 +309,17 @@ age that -%0A + has a mi @@ -360,17 +360,17 @@ being as - +%0A close to @@ -387,17 +387,17 @@ al image -%0A + as possi @@ -430,17 +430,17 @@ s the L1 - +%0A norm of @@ -465,17 +465,17 @@ e image, -%0A + and mini @@ -500,17 +500,17 @@ ariation - +%0A typicall @@ -544,17 +544,17 @@ ges with -%0A + flat dom @@ -570,17 +570,17 @@ rated by - +%0A sharp ed @@ -656,13 +656,13 @@ ling - the%0A +%0Athe trad @@ -760,71 +760,64 @@ ort -scipy%0Afrom scipy import nd +matplotlib.pyplot as plt%0A%0Afrom scikits. image -%0A + import -matplotlib.pyplot as plt +data %0Afro @@ -866,18 +866,12 @@ l = -scipy.misc +data .len @@ -1539,34 +1539,9 @@ t=0, - %0A +%0A
584317c7b5e8536eb0aa4203cd7ff0287d581367
Refactor configuration parsing
blimp/commands/launch.py
blimp/commands/launch.py
import time import boto3 from clint.textui import indent, puts, puts_err from helpers.aws import json_serialize_instance def _get_launch_args_and_tags(args, config): role_config = config['roles'][args.role] launch_args = { 'ImageId': role_config['ami_id'], 'MinCount': 1, 'MaxCount': 1, 'KeyName': config['key_pair'], 'SecurityGroupIds': role_config['security_group_ids'], 'InstanceType': role_config['instance_type'], 'Monitoring': { 'Enabled': role_config['monitoring'], }, 'SubnetId': config['network']['availability_zones'][args.availability_zone]['subnet_id'], 'InstanceInitiatedShutdownBehavior': 'stop', 'EbsOptimized': role_config['ebs_optimized'], } if args.private_ip_address: launch_args['PrivateIpAddress'] = args.private_ip_address if 'iam_instance_profile_arn' in role_config: launch_args['IamInstanceProfile'] = { 'Arn': role_config['iam_instance_profile_arn'], } if 'block_device_mappings' in role_config: launch_args['BlockDeviceMappings'] = role_config['block_device_mappings'] tags = [{ 'Key': 'Name', 'Value': args.hostname, }] for tag in role_config.get('tags', []): for k in tag.keys(): tags.append({'Key': k, 'Value': tag[k]}) return launch_args, tags def launch(args, config): ec2 = boto3.resource('ec2') launch_config, tags = _get_launch_args_and_tags(args, config) instance = ec2.create_instances(**launch_config)[0] puts_err('New instance id: {}'.format(instance.id)) with indent(4): while instance.state['Code'] is not 16: puts_err("Instance state:{}, sleeping for five seconds".format(instance.state['Name'])) time.sleep(5) instance.load() puts_err('Tagging {} with the name {}'.format(instance.id, args.hostname)) instance.create_tags(Tags=tags) puts(json_serialize_instance(instance)) def _register_launch(subparsers): parser_launch = subparsers.add_parser('launch', help='launch help') parser_launch.add_argument('role', type=str, help='Role of the EC2 instance to launch') parser_launch.add_argument('-a', '--availability-zone', type=str, required=True, help='Availability zone to launch in') parser_launch.add_argument('-n', '--hostname', type=str, required=True, help='Hostname of the new host') parser_launch.add_argument('-i', '--private-ip-address', type=str, help='Private ip address to assign')
Python
0
@@ -171,397 +171,107 @@ -role_config = config%5B'roles'%5D%5Bargs.role%5D%0A%0A launch_args = %7B%0A 'ImageId': role_config%5B'ami_id'%5D,%0A 'MinCount': 1,%0A 'MaxCount': 1,%0A 'KeyName': config%5B'key_pair'%5D,%0A 'SecurityGroupIds': role_config%5B'security_group_ids'%5D,%0A 'InstanceType': role_config%5B'instance_type'%5D,%0A 'Monitoring': %7B%0A 'Enabled': role_config%5B'monitoring'%5D,%0A %7D +launch_args = %7B%0A 'MinCount': 1,%0A 'MaxCount': 1,%0A 'KeyName': config%5B'key_pair'%5D ,%0A @@ -427,457 +427,234 @@ +%7D%0A%0A -'EbsOptimized': role_config%5B'ebs_optimized'%5D,%0A %7D%0A%0A if args.private_ip_address:%0A launch_args%5B'PrivateIpAddress'%5D = args.private_ip_address%0A if 'iam_instance_profile_arn' in role_config:%0A launch_args%5B'IamInstanceProfile'%5D = %7B%0A 'Arn': role_config%5B'iam_instance_profile_arn'%5D,%0A %7D%0A if 'block_device_mappings' in role_config:%0A launch_args%5B'BlockDeviceMappings'%5D = role_config%5B'block_device_mappings'%5D +role_config = config%5B'roles'%5D%5Bargs.role%5D%0A role_tags = role_config.pop('Tags', %5B%5D)%0A launch_args.update(role_config)%0A%0A if args.private_ip_address:%0A launch_args%5B'PrivateIpAddress'%5D = args.private_ip_address %0A%0A @@ -752,30 +752,12 @@ ole_ -config.get('tags', %5B%5D) +tags :%0A
507cdda01f9208127f8ce5f1ecadc6d5d521fe4d
fix for flake8
cupy/manipulation/kind.py
cupy/manipulation/kind.py
import numpy import cupy from cupy import core # TODO(okuta): Implement asfarray def asfortranarray(a, dtype=None): """Return an array laid out in Fortran order in memory. Args: a (~cupy.ndarray): The input array. dtype (str or dtype object, optional): By default, the data-type is inferred from the input data. Returns: ~cupy.ndarray: The input `a` in Fortran, or column-major, order. .. seealso:: :func:`numpy.asfortranarray` """ return core.asfortranarray(a, dtype) # TODO(okuta): Implement asarray_chkfinite # TODO(okuta): Implement asscalar # TODO(okuta): Implement require
Python
0
@@ -1,30 +1,4 @@ -import numpy%0A%0Aimport cupy%0A from
e157cfbf85bab3373ef7b4e5e76da20bd572bebb
modify method name: get_by_name_or_all to get_artist_by_name refactoring methods
art_archive_api/utils.py
art_archive_api/utils.py
from flask import abort def get_by_name_or_all(model, name): objects = [] objects_count = 0 if name: objects = model.query.filter_by(name=name) objects_count = objects.count() else: objects = model.query.all() objects_count = model.query.count() return objects, objects_count def slice_query_set(offset, count, objects_count, objects): if offset >= 0 and objects_count > offset: if count: count += offset else: count = objects_count objects = objects[offset:count] def serialize_artist(artist, images_detail): json_data = {} if images_detail: json_data = artist.serialize_with_images() else: json_data = artist.serialize() return json_data def get_or_abort(model, object_id, code=422): """ get an object with his given id or an abort error (422 is the default) """ result = model.query.get(object_id) return result or abort(code)
Python
0.000002
@@ -31,22 +31,22 @@ get_ +artist_ by_name -_or_all (mod @@ -76,30 +76,8 @@ = %5B%5D -%0A objects_count = 0 %0A%0A @@ -142,48 +142,8 @@ me)%0A - objects_count = objects.count()%0A @@ -181,19 +181,9 @@ uery -.all()%0A +%0A @@ -198,27 +198,23 @@ count = -model.query +objects .count() @@ -489,16 +489,35 @@ t:count%5D +%0A return objects %0A%0Adef se
9f44888c00d29bd1d1a53eb09ab90b61f33c5e05
Update existing settings migration with minor field change.
awx/main/migrations/0002_v300_changes.py
awx/main/migrations/0002_v300_changes.py
# -*- coding: utf-8 -*- # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('main', '0001_initial'), ] operations = [ migrations.CreateModel( name='TowerSettings', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('key', models.CharField(unique=True, max_length=255)), ('description', models.TextField()), ('category', models.CharField(max_length=128)), ('value', models.TextField()), ('value_type', models.CharField(max_length=12, choices=[(b'string', 'String'), (b'int', 'Integer'), (b'float', 'Decimal'), (b'json', 'JSON'), (b'bool', 'Boolean'), (b'password', 'Password'), (b'list', 'List')])), ('user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)), ], ), ]
Python
0
@@ -971,32 +971,42 @@ odels.TextField( +blank=True )),%0A
0f5e2cd04b0518a846e898214b484b43e1b051d9
Add tests
tests/cupyx_tests/jit_tests/test_raw.py
tests/cupyx_tests/jit_tests/test_raw.py
import unittest import numpy import cupy from cupyx import jit from cupy import testing class TestRaw(unittest.TestCase): def test_raw_onw_thread(self): @jit.rawkernel() def f(x, y): y[0] = x[0] x = cupy.array([10], dtype=numpy.int32) y = cupy.array([20], dtype=numpy.int32) f((1,), (1,), (x, y)) assert int(y[0]) == 10 def test_raw_elementwise_single_op(self): @jit.rawkernel() def f(x, y): tid = jit.threadIdx.x + jit.blockDim.x * jit.blockIdx.x y[tid] = x[tid] x = testing.shaped_random((30,), dtype=numpy.int32, seed=0) y = testing.shaped_random((30,), dtype=numpy.int32, seed=1) f((5,), (6,), (x, y)) assert bool((x == y).all()) def test_raw_elementwise_loop(self): @jit.rawkernel() def f(x, y, size): tid = jit.threadIdx.x + jit.blockDim.x * jit.blockIdx.x ntid = jit.blockDim.x * jit.gridDim.x for i in range(tid, size, ntid): y[i] = x[i] x = testing.shaped_random((1024,), dtype=numpy.int32, seed=0) y = testing.shaped_random((1024,), dtype=numpy.int32, seed=1) f((5,), (6,), (x, y, numpy.uint32(1024))) assert bool((x == y).all()) def test_raw_multidimensional_array(self): @jit.rawkernel() def f(x, y, n_row, n_col): tid = jit.threadIdx.x + jit.blockDim.x * jit.blockIdx.x ntid = jit.blockDim.x * jit.gridDim.x size = n_row * n_col for i in range(tid, size, ntid): i_row = i // n_col i_col = i % n_col y[i_row, i_col] = x[i_row, i_col] n, m = numpy.uint32(12), numpy.uint32(13) x = testing.shaped_random((n, m), dtype=numpy.int32, seed=0) y = testing.shaped_random((n, m), dtype=numpy.int32, seed=1) f((5,), (6,), (x, y, n, m)) assert bool((x == y).all()) def test_raw_0dim_array(self): @jit.rawkernel() def f(x, y): y[()] = x[()] x = testing.shaped_random((), dtype=numpy.int32, seed=0) y = testing.shaped_random((), dtype=numpy.int32, seed=1) f((1,), (1,), (x, y)) assert bool((x == y).all()) def test_syncthreads(self): @jit.rawkernel() def f(x, y, buf): tid = jit.threadIdx.x + jit.threadIdx.y * jit.blockDim.x ntid = jit.blockDim.x * jit.blockDim.y buf[tid] = x[ntid - tid - 1] jit.syncthreads() y[tid] = buf[ntid - tid - 1] x = testing.shaped_random((1024,), dtype=numpy.int32, seed=0) y = testing.shaped_random((1024,), dtype=numpy.int32, seed=1) buf = testing.shaped_random((1024,), dtype=numpy.int32, seed=2) f((1,), (32, 32), (x, y, buf)) assert bool((x == y).all()) def test_raw_grid_block_interface(self): @jit.rawkernel() def f(x, y, size): tid = jit.threadIdx.x + jit.blockDim.x * jit.blockIdx.x ntid = jit.blockDim.x * jit.gridDim.x for i in range(tid, size, ntid): y[i] = x[i] x = testing.shaped_random((1024,), dtype=numpy.int32, seed=0) y = testing.shaped_random((1024,), dtype=numpy.int32, seed=1) f[5, 6](x, y, numpy.uint32(1024)) assert bool((x == y).all())
Python
0.000001
@@ -2872,32 +2872,671 @@ x == y).all())%0A%0A + def test_shared_memory(self):%0A @jit.rawkernel()%0A def f(x, y):%0A tid = jit.threadIdx.x%0A ntid = jit.blockDim.x%0A bid = jit.blockIdx.x%0A i = tid + bid * ntid%0A%0A smem = jit.shared_malloc(numpy.int32, 32)%0A smem%5Btid%5D = x%5Bi%5D%0A jit.syncthreads()%0A y%5Bi%5D = smem%5Bntid - tid - 1%5D%0A%0A x = testing.shaped_random((1024,), dtype=numpy.int32, seed=0)%0A y = testing.shaped_random((1024,), dtype=numpy.int32, seed=1)%0A f((32,), (32,), (x, y))%0A expected = x.reshape(32, 32)%5B:, ::-1%5D.ravel()%0A assert bool((y == expected).all())%0A%0A def test_raw
f3999c68e0494a08678f1bd6b4fb9e6ad031df92
Fix Flakes Errors: openspending/test/unit/model/helpers.py
openspending/test/unit/model/helpers.py
openspending/test/unit/model/helpers.py
from tempfile import mkdtemp SIMPLE_MODEL = { 'dataset': { 'name': 'test', 'label': 'Test Case Model', 'description': 'I\'m a banana!' }, 'mapping': { 'amount': { 'type': 'value', 'label': 'Amount', 'column': 'amount', 'datatype': 'float' }, 'time': { 'type': 'value', 'label': 'Year', 'column': 'year', 'datatype': 'date', 'key': True }, 'field': { 'type': 'value', 'label': 'Field 1', 'column': 'field', 'datatype': 'string' }, 'to': { 'label': 'Einzelplan', 'type': 'entity', 'facet': True, 'key': True, 'attributes': { 'name': {'column': 'to_name', 'datatype': 'id'}, 'label': {'column': 'to_label', 'datatype': 'string'} } }, 'function': { 'label': 'Function code', 'type': 'classifier', 'taxonomy': 'funny', 'facet': False, 'key': True, 'attributes': { 'name': {'column': 'func_name', 'datatype': 'id'}, 'label': {'column': 'func_label', 'datatype': 'string'} } } } } TEST_DATA="""year,amount,field,to_name,to_label,func_name,func_label 2010,200,foo,"bcorp","Big Corp",food,Food & Nutrition 2009,190,bar,"bcorp","Big Corp",food,Food & Nutrition 2010,500,foo,"acorp","Another Corp",food,Food & Nutrition 2009,900,qux,"acorp","Another Corp",food,Food & Nutrition 2010,300,foo,"ccorp","Central Corp",school,Schools & Education 2009,600,qux,"ccorp","Central Corp",school,Schools & Education """ def load_dataset(dataset): from StringIO import StringIO import csv from openspending.validation.data import convert_types reader = csv.DictReader(StringIO(TEST_DATA)) for row in reader: row = convert_types(SIMPLE_MODEL['mapping'], row) dataset.load(row) #def make_test_app(use_cookies=False): # web.app.config['TESTING'] = True # web.app.config['SITE_ID'] = '$$$TEST$$$' # web.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:' # web.app.config['STAGING_DATA_PATH'] = mkdtemp() # core.db.create_all() # return web.app.test_client(use_cookies=use_cookies) def tear_down_test_app(): pass # core.db.session.rollback() # core.db.drop_all()
Python
0.000078
@@ -1,34 +1,4 @@ -from tempfile import mkdtemp%0A%0A SIMP
7893695348a23472835e6d6c2d57b8ac4dea2dc3
Document test intention.
test/broker/03-publish-timeout-qos2.py
test/broker/03-publish-timeout-qos2.py
#!/usr/bin/python # Test whether a PUBLISH to a topic with QoS 2 results in the correct packet # flow. This test introduces delays into the flow in order to force the broker # to send duplicate PUBREC and PUBCOMP messages. import subprocess import socket import time from struct import * rc = 0 keepalive = 600 connect_packet = pack('!BBH6sBBHH21s', 16, 12+2+21,6,"MQIsdp",3,2,keepalive,21,"pub-qos2-timeout-test") connack_packet = pack('!BBBB', 32, 2, 0, 0); mid = 1926 publish_packet = pack('!BBH13sH15s', 48+4, 2+13+2+15, 13, "pub/qos2/test", mid, "timeout-message") pubrec_packet = pack('!BBH', 80, 2, mid) pubrec_dup_packet = pack('!BBH', 80+8, 2, mid) pubrel_packet = pack('!BBH', 96, 2, mid) pubcomp_packet = pack('!BBH', 112, 2, mid) broker = subprocess.Popen(['../../src/mosquitto', '-c', '03-publish-timeout-qos2.conf']) try: time.sleep(0.1) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(8) # 8 seconds timeout is longer than 5 seconds message retry. sock.connect(("localhost", 1888)) sock.send(connect_packet) connack_recvd = sock.recv(256) if connack_recvd != connack_packet: print "FAIL: Connect failed." rc = 1 else: sock.send(publish_packet) pubrec_recvd = sock.recv(256) if pubrec_recvd != pubrec_packet: (cmd, rl, mid_recvd) = unpack('!BBH', pubrec_recvd) print "FAIL: Expected 80,2," + str(mid) + " got " + str(cmd) + "," + str(rl) + "," + str(mid_recvd) rc = 1 else: pubrec_recvd = sock.recv(256) if pubrec_recvd != pubrec_dup_packet: (cmd, rl, mid_recvd) = unpack('!BBH', pubrec_recvd) print "FAIL: Expected 88,2," + str(mid) + " got " + str(cmd) + "," + str(rl) + "," + str(mid_recvd) rc = 1 else: sock.send(pubrel_packet) pubcomp_recvd = sock.recv(256) if pubcomp_recvd != pubcomp_packet: (cmd, rl, mid_recvd) = unpack('!BBH', pubcomp_recvd) print "FAIL: Expected 112,2," + str(mid) + " got " + str(cmd) + "," + str(rl) + "," + str(mid_recvd) rc = 1 sock.close() finally: broker.terminate() exit(rc)
Python
0
@@ -1444,16 +1444,91 @@ %09%09else:%0A +%09%09%09# Timeout is 8 seconds which means the broker should repeat the PUBREC.%0A %09%09%09pubre
b82c783b573493a92f166181898c4122a1f2aef6
Add docstring to hadoop module
salt/modules/hadoop.py
salt/modules/hadoop.py
# -*- coding: utf-8 -*- ''' Support for hadoop :maintainer: Yann Jouanin <yann.jouanin@intelunix.fr> :maturity: new :depends: :platform: linux ''' # Import python libs import os import sys import shlex # Import salt libs import salt.utils from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS from salt.exceptions import SaltException __authorized_modules__ = ['namenode', 'dfsadmin', 'dfs', 'fs'] def __virtual__(): ''' Check if hadoop is present, then load the module ''' if salt.utils.which('hadoop'): return 'hadoop' return False def version(): ''' Return version from hadoop version CLI Example: .. code-block:: bash salt '*' hadoop.version ''' cmd = 'hadoop version' out = __salt__['cmd.run'](cmd).split() return out[1] def _hadoop_cmd(module, command, *args): out = None if module and command: if module in __authorized_modules__: cmd = 'hadoop %s -%s %s' % (module, command, ' '.join(args)) out = __salt__['cmd.run'](cmd) else: return 'Error: Unknown module' else: return 'Error: Module and command not defined' return out def dfs(command=None, *args): ''' Execute a command on DFS CLI Example: .. code-block:: bash salt '*' hadoop.cmd ls / ''' if command: return _hadoop_cmd('dfs', command, *args) else: return 'Error: command must be provided' def dfs_present(path): cmd_return = _hadoop_cmd('dfs', 'stat', path) if 'No such file or directory' in cmd_return: return False else: return True def dfs_absent(path): cmd_return = _hadoop_cmd('dfs', 'stat', path) if 'No such file or directory' in cmd_return: return True else: return False def namenode_format(force=None): ''' Format a name node .. code-block:: bash salt '*' hadoop.namenode_format force=True ''' force_param = '' if force: force_param = '-force' return _hadoop_cmd('namenode', 'format', '-nonInteractive', force_param)
Python
0
@@ -857,24 +857,252 @@ nd, *args):%0A + '''%0A Hadoop command wrapper %0A%0A In order to prevent random execution the module name is checked%0A%0A Follows hadoop command template:%0A hadoop module -command args%0A E.g.: hadoop dfs -ls /%0A '''%0A out = No @@ -1563,19 +1563,19 @@ hadoop. -cmd +dfs ls /%0A @@ -1737,175 +1737,629 @@ -cmd_return = _hadoop_cmd('dfs', 'stat', path)%0A if 'No such file or directory' in cmd_return:%0A return False%0A else:%0A return True%0A%0Adef dfs_absent(path): +'''%0A Check if a file or directory is present on the distributed FS.%0A %0A CLI Example:%0A%0A .. code-block:: bash%0A%0A salt '*' hadoop.dfs_present /some_random_file%0A%0A%0A Returns True if the file is present%0A '''%0A%0A cmd_return = _hadoop_cmd('dfs', 'stat', path)%0A if 'No such file or directory' in cmd_return:%0A return False%0A else:%0A return True%0A%0Adef dfs_absent(path):%0A '''%0A Check if a file or directory is absent on the distributed FS.%0A %0A CLI Example:%0A%0A .. code-block:: bash%0A%0A salt '*' hadoop.dfs_absent /some_random_file%0A%0A Returns True if the file is absent%0A '''%0A %0A
88a0fbd3e5fda85770480cf28305676fad0dd568
fix slug override
bluebottle/geo/models.py
bluebottle/geo/models.py
from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models from django.template.defaultfilters import slugify from django.utils.translation import ugettext_lazy as _ from geoposition.fields import GeopositionField from parler.models import TranslatedFields from sorl.thumbnail import ImageField from bluebottle.geo.fields import PointField from bluebottle.utils.models import SortableTranslatableModel from .validators import Alpha2CodeValidator, Alpha3CodeValidator, \ NumericCodeValidator class GeoBaseModel(SortableTranslatableModel): """ Abstract base model for the UN M.49 geoscheme. Refs: http://unstats.un.org/unsd/methods/m49/m49.htm http://unstats.un.org/unsd/methods/m49/m49regin.htm https://en.wikipedia.org/wiki/United_Nations_geoscheme https://en.wikipedia.org/wiki/UN_M.49 """ # https://en.wikipedia.org/wiki/ISO_3166-1_numeric # http://unstats.un.org/unsd/methods/m49/m49alpha.htm numeric_code = models.CharField(_("numeric code"), max_length=3, blank=True, null=True, unique=True, validators=[NumericCodeValidator], help_text=_( "ISO 3166-1 or M.49 numeric code") ) def __unicode__(self): return self.name def save(self, *args, **kwargs): if self.numeric_code == '': self.numeric_code = None super(GeoBaseModel, self).save(*args, **kwargs) class Meta: abstract = True class Region(GeoBaseModel): """ Macro geographical (continental) region as defined by the UN M.49 geoscheme. """ translations = TranslatedFields( name=models.CharField(_("name"), max_length=100) ) class Meta(GeoBaseModel.Meta): verbose_name = _("region") verbose_name_plural = _("regions") class SubRegion(GeoBaseModel): """ Geographical sub-region as defined by the UN M.49 geoscheme. """ translations = TranslatedFields( name=models.CharField(_("name"), max_length=100) ) region = models.ForeignKey(Region, verbose_name=_("region")) class Meta(GeoBaseModel.Meta): verbose_name = _("sub region") verbose_name_plural = _("sub regions") class Country(GeoBaseModel): """ Geopolitical entity (country or territory) as defined by the UN M.49 geoscheme. 
""" translations = TranslatedFields( name=models.CharField(_("name"), max_length=100) ) subregion = models.ForeignKey(SubRegion, verbose_name=_("sub region")) # https://en.wikipedia.org/wiki/ISO_3166-1 alpha2_code = models.CharField(_("alpha2 code"), max_length=2, blank=True, validators=[Alpha2CodeValidator], help_text=_("ISO 3166-1 alpha-2 code")) alpha3_code = models.CharField(_("alpha3 code"), max_length=3, blank=True, validators=[Alpha3CodeValidator], help_text=_("ISO 3166-1 alpha-3 code")) # http://www.oecd.org/dac/aidstatistics/daclistofodarecipients.htm oda_recipient = models.BooleanField( _("ODA recipient"), default=False, help_text=_( "Whether a country is a recipient of Official Development" "Assistance from the OECD's Development Assistance Committee.")) @property def code(self): return self.alpha2_code class Meta(GeoBaseModel.Meta): ordering = ['translations__name'] verbose_name = _("country") verbose_name_plural = _("countries") class LocationGroup(models.Model): name = models.CharField(_('name'), max_length=255) description = models.TextField(_('description'), blank=True) class Meta(GeoBaseModel.Meta): ordering = ['name'] verbose_name = _("location group") verbose_name_plural = _("location groups") def __unicode__(self): return self.name class Location(models.Model): name = models.CharField(_('name'), max_length=255) slug = models.SlugField(_('slug'), blank=False, null=True, max_length=255) position = GeopositionField(null=True) group = models.ForeignKey('geo.LocationGroup', verbose_name=_('location group'), null=True, blank=True) city = models.CharField(_('city'), blank=True, null=True, max_length=255) country = models.ForeignKey('geo.Country', blank=True, null=True) description = models.TextField(_('description'), blank=True) image = ImageField(_('image'), max_length=255, null=True, blank=True, upload_to='location_images/', help_text=_('Location picture')) class Meta(GeoBaseModel.Meta): ordering = ['name'] verbose_name = _('office location') verbose_name_plural = _('office locations') def save(self, *args, **kwargs): if not self.slug: self.slug = slugify(self.title) super(Location, self).save() def __unicode__(self): return self.name class Place(models.Model): street_number = models.CharField(_('Street Number'), max_length=255, blank=True, null=True) street = models.CharField(_('Street'), max_length=255, blank=True, null=True) postal_code = models.CharField(_('Postal Code'), max_length=255, blank=True, null=True) locality = models.CharField(_('Locality'), max_length=255, blank=True, null=True) province = models.CharField(_('Province'), max_length=255, blank=True, null=True) country = models.ForeignKey('geo.Country') formatted_address = models.CharField(_('Address'), max_length=255, blank=True, null=True) position = GeopositionField() content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') class InitiativePlace(models.Model): street_number = models.CharField(_('Street Number'), max_length=255, blank=True, null=True) street = models.CharField(_('Street'), max_length=255, blank=True, null=True) postal_code = models.CharField(_('Postal Code'), max_length=255, blank=True, null=True) locality = models.CharField(_('Locality'), max_length=255, blank=True, null=True) province = models.CharField(_('Province'), max_length=255, blank=True, null=True) country = models.ForeignKey('geo.Country') formatted_address = models.CharField(_('Address'), max_length=255, 
blank=True, null=True) position = GeopositionField() class Geolocation(models.Model): street_number = models.CharField(_('Street Number'), max_length=255, blank=True, null=True) street = models.CharField(_('Street'), max_length=255, blank=True, null=True) postal_code = models.CharField(_('Postal Code'), max_length=255, blank=True, null=True) locality = models.CharField(_('Locality'), max_length=255, blank=True, null=True) province = models.CharField(_('Province'), max_length=255, blank=True, null=True) country = models.ForeignKey('geo.Country') formatted_address = models.CharField(_('Address'), max_length=255, blank=True, null=True) position = PointField() anonymized = False class JSONAPIMeta: resource_name = 'locations' def __unicode__(self): if self.locality: return u"{}, {}".format(self.locality, self.country.name) else: return self.country.name
Python
0.000002
@@ -5120,12 +5120,11 @@ elf. -titl +nam e)%0A%0A
3accbbc1b0c656238425cb4c34d2efe28ec2a885
Support snet
cfshorty.py
cfshorty.py
from hashlib import sha256 from urllib import quote, unquote from urlparse import urlparse from flask import Flask, abort, request, redirect, render_template, jsonify from jinja2 import Template from swiftly.client import Client app = Flask(__name__) # Load default config and override config from an environment variable app.config.update(dict( DEBUG = False, USE_EVENTLET = False, SWIFTLY_CACHE_PATH = './.swiftly', CF_USERNAME = '', CF_API_KEY = '', CF_REGION = 'DFW', CF_AUTH_URL = 'https://identity.api.rackspacecloud.com/v2.0', CF_CONTAINER = 'cfshorty', CF_CDN_URL = None )) app.config.from_envvar('CFSHORTY_SETTINGS', silent=False) redir_template_text = ''' <!DOCTYPE HTML> <html lang="en-US"> <head> <meta charset="UTF-8"> <meta http-equiv="refresh" content="1; url={{url}}"> <script> window.location.href = "{{url}}" </script> <title>{{url}} - Page Redirection</title> </head> <body> If you are not redirected automatically to <a href='{{url}}'>{{url}}</a> follow <a href='{{url}}'>this link</a> </body> </html> ''' redirect_template = Template(redir_template_text) def _swiftlyv(*args): print args cf = Client(app.config['CF_AUTH_URL'], app.config['CF_USERNAME'], app.config['CF_API_KEY'], cache_path=app.config['SWIFTLY_CACHE_PATH'], eventlet=app.config['USE_EVENTLET'], region=app.config['CF_REGION'], verbose=_swiftlyv) def _shortcode(url, length=6): return sha256(url).hexdigest()[-length:] def _save_url(shortcode, longurl): try: s = cf.put_object(app.config['CF_CONTAINER'], shortcode, contents=redirect_template.render(url=longurl), headers={'x-object-meta-longurl': longurl, 'content-type': 'text/html'}) print s except Exception: #because we're ghetto's we'll retry when swiftly loses the connection try: print "farking retrying" s = cf.put_object(app.config['CF_CONTAINER'], shortcode, contents=redirect_template.render(url=longurl), headers={'x-object-meta-longurl': longurl, 'content-type': 'text/html'}) print s except Exception as err: print "Got -> %s" % err s = (500, None, None) if s[0] // 100 == 2: return True else: return False def _get_url(source): print 'wtf' res = cf.head_object(app.config['CF_CONTAINER'], source) if not res[0] == 200: return None else: return res[2].get('x-object-meta-longurl', None) @app.route('/shorten') def shorten(): urlarg = request.args.get('longurl') if urlarg: clean = unquote(urlarg) parsed = urlparse(clean) if parsed.scheme and parsed.netloc: code = _shortcode(clean) print request.host if _save_url(code, clean): return jsonify({'shortcode': code, 'shorturl': '%s/%s' % (request.host, code), 'cdnshort': '%s/%s' % (app.config['CF_CDN_URL'] or request.host, code), 'longurl': clean}) else: abort(500) else: abort(400) else: abort(400) @app.route('/<shortcode>') def resolvecode(shortcode): if len(shortcode) != 6: abort(400) url = _get_url(shortcode) if not url: abort(404) else: return redirect(url) @app.route('/info/<shortcode>') def lookup(shortcode): if len(shortcode) != 6: abort(400) url = _get_url(shortcode) if not url: abort(404) else: return redirect(url) @app.route('/') def index(): return render_template('index.html') if __name__ == '__main__': app.run(host='0.0.0.0', debug=app.config['DEBUG'])
Python
0
@@ -421,24 +421,46 @@ /.swiftly',%0A + USE_SNET = False,%0A CF_USERN @@ -1300,16 +1300,40 @@ + app.config%5B'USE_SNET'%5D, cache_p @@ -1369,16 +1369,28 @@ _PATH'%5D, +%0A eventle @@ -1418,28 +1418,16 @@ NTLET'%5D, -%0A region= @@ -1450,16 +1450,28 @@ EGION'%5D, +%0A verbose
c326026ac36f94565816c1082afc2e8e52a5c664
Fix bug related to representation of site objects
src/tmlib/models/site.py
src/tmlib/models/site.py
import logging import numpy as np from sqlalchemy import Column, Integer, ForeignKey, Boolean from sqlalchemy.orm import relationship, backref from sqlalchemy import UniqueConstraint from tmlib.models.base import ExperimentModel, DateMixIn logger = logging.getLogger(__name__) class Site(ExperimentModel, DateMixIn): '''A *site* is a unique `y`, `x` position projected onto the *plate* bottom plane that was scanned by the microscope. Attributes ---------- y: int zero-based row index of the image within the well x: int zero-based column index of the image within the well height: int number of pixels along the vertical axis of the site width: int number of pixels along the horizontal axis of the site well_id: int ID of the parent well well: tmlib.well.Well parent well to which the site belongs shifts: [tmlib.models.SiteShifts] shifts that belong to the site intersection: tmlib.models.SiteIntersection intersection that belongs to the site channel_image_files: List[tmlib.models.ChannelImageFile] channel image files that belong to the site mapobject_segmentations: List[tmlib.models.MapobjectSegmentation] segmentations that belong to the site omitted: bool whether the image file is considered empty, i.e. consisting only of background pixels without having biologically relevant information ''' #: str: name of the corresponding database table __tablename__ = 'sites' __table_args__ = (UniqueConstraint('x', 'y', 'well_id'), ) # Table columns y = Column(Integer, index=True) x = Column(Integer, index=True) height = Column(Integer, index=True) width = Column(Integer, index=True) omitted = Column(Boolean, index=True) well_id = Column( Integer, ForeignKey('wells.id', onupdate='CASCADE', ondelete='CASCADE'), index=True ) # Relationships to other tables well = relationship( 'Well', backref=backref('sites', cascade='all, delete-orphan') ) def __init__(self, y, x, height, width, well_id, omitted=False): ''' Parameters ---------- y: int zero-based row index of the image within the well x: int zero-based column index of the image within the well height: int number of pixels along the vertical axis of the site width: int number of pixels along the horizontal axis of the site well_id: int ID of the parent well omitted: bool, optional whether the image file is considered empty, i.e. consisting only of background pixels without having biologically relevant information (default: ``False``) ''' self.y = y self.x = x self.height = height self.width = width self.well_id = well_id self.omitted = omitted @property def coordinate(self): '''Tuple[int]: row, column coordinate of the site within the well''' return (self.y, self.x) @property def image_size(self): '''Tuple[int]: number of pixels along the vertical (*y*) and horizontal (*x*) axis, i.e. 
height and width of the site ''' return (self.height, self.width) @property def offset(self): '''Tuple[int]: *y*, *x* coordinate of the top, left corner of the site relative to the layer overview at the maximum zoom level ''' logger.debug('calculate offset for site %d', self.id) well = self.well plate = well.plate experiment = plate.experiment y_offset = ( # Sites in the well above the site self.y * self.image_size[0] + # Potential displacement of sites in y-direction self.y * experiment.vertical_site_displacement + # Wells and plates above the well well.offset[0] ) x_offset = ( # Sites in the well left of the site self.x * self.image_size[1] + # Potential displacement of sites in y-direction self.x * experiment.horizontal_site_displacement + # Wells and plates left of the well well.offset[1] ) return (y_offset, x_offset) def __repr__(self): return ( '<Site(id=%r, well=%r, y=%r, x=%r)>' % (self.id, self.well.name, self.y, self.x) )
Python
0
@@ -4472,16 +4472,19 @@ %25r, well +_id =%25r, y=%25 @@ -4531,13 +4531,11 @@ well -.name +_id , se
c546192a83dce300ad46193e351229a5969e979d
Remove warming up from TestBase._test_jitted() (#571)
sdc/tests/tests_perf/test_perf_base.py
sdc/tests/tests_perf/test_perf_base.py
import os import unittest import numba from sdc.tests.tests_perf.test_perf_utils import * class TestBase(unittest.TestCase): iter_number = 5 results_class = TestResults @classmethod def create_test_results(cls): drivers = [] if is_true(os.environ.get('SDC_TEST_PERF_EXCEL', True)): drivers.append(ExcelResultsDriver('perf_results.xlsx')) if is_true(os.environ.get('SDC_TEST_PERF_CSV', False)): drivers.append(CSVResultsDriver('perf_results.csv')) results = cls.results_class(drivers) if is_true(os.environ.get('LOAD_PREV_RESULTS')): results.load() return results @classmethod def setUpClass(cls): cls.test_results = cls.create_test_results() cls.total_data_length = [] cls.num_threads = int(os.environ.get('NUMBA_NUM_THREADS', config.NUMBA_NUM_THREADS)) cls.threading_layer = os.environ.get('NUMBA_THREADING_LAYER', config.THREADING_LAYER) @classmethod def tearDownClass(cls): # TODO: https://jira.devtools.intel.com/browse/SAT-2371 cls.test_results.print() cls.test_results.dump() def _test_jitted(self, pyfunc, record, *args, **kwargs): # compilation time record["compile_results"] = calc_compilation(pyfunc, *args, **kwargs) cfunc = numba.njit(pyfunc) # Warming up cfunc(*args, **kwargs) # execution and boxing time record["test_results"], record["boxing_results"] = \ get_times(cfunc, *args, **kwargs) def _test_python(self, pyfunc, record, *args, **kwargs): record["test_results"], _ = \ get_times(pyfunc, *args, **kwargs) def _test_jit(self, pyfunc, base, *args): record = base.copy() record["test_type"] = 'SDC' self._test_jitted(pyfunc, record, *args) self.test_results.add(**record) def _test_py(self, pyfunc, base, *args): record = base.copy() record["test_type"] = 'Python' self._test_python(pyfunc, record, *args) self.test_results.add(**record)
Python
0.000001
@@ -1365,61 +1365,8 @@ c)%0A%0A - # Warming up%0A cfunc(*args, **kwargs)%0A%0A
56df3ee2ac088f40dd43df6e6fd8641aedd320b9
Use explicit class-based view name
corehq/apps/auditcare/tests/test_middleware.py
corehq/apps/auditcare/tests/test_middleware.py
from contextlib import contextmanager from unittest.mock import patch from django.conf import settings as default_settings from django.test import SimpleTestCase from testil import Config, eq from .. import middleware as mod class TestAuditMiddleware(SimpleTestCase): def setUp(self): self.request = Config(user="username") def test_generic_view_not_audited_with_default_settings(self): func = make_view() with configured_middleware() as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_no_audit(self.request) def test_admin_view_is_audited_with_default_settings(self): func = make_view(module="django.contrib.admin") with configured_middleware() as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_audit(self.request) def test_generic_view_is_audited_with_audit_all_views_setting(self): func = make_view() settings = Settings(AUDIT_ALL_VIEWS=True) with configured_middleware(settings) as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_audit(self.request) def test_generic_view_class_is_audited_with_audit_all_views_setting(self): func = make_view("TheView") settings = Settings(AUDIT_ALL_VIEWS=True) with configured_middleware(settings) as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_audit(self.request) def test_audit_views_setting(self): func = make_view("ChangeMyPasswordView", "corehq.apps.settings.views") with configured_middleware() as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_audit(self.request) def test_audit_modules_setting(self): func = make_view("TheView", "corehq.apps.reports") with configured_middleware() as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_audit(self.request) def test_debug_media_view_not_audited(self): func = make_view("debug_media", "debug_toolbar.views") with configured_middleware() as ware: ware.process_view(self.request, func, ARGS, KWARGS) self.assert_no_audit(self.request) def test_staticfiles_not_audited(self): from django.contrib.staticfiles.views import serve with configured_middleware() as ware: ware.process_view(self.request, serve, ARGS, KWARGS) self.assert_no_audit(self.request) def test_process_response_without_audit_doc(self): with configured_middleware() as ware: ware(self.request) assert not hasattr(self.request, "audit_doc") def test_process_response_with_audit_doc_with_user(self): self.request.audit_doc = audit_doc = fake_audit_doc(user="username") with configured_middleware() as ware: ware(self.request) self.assertEqual(audit_doc.status_code, 200) self.assertEqual(audit_doc.user, "username") self.assertEqual(audit_doc.save.count, 1) def test_process_response_with_audit_doc_and_audit_user(self): self.request.audit_doc = audit_doc = fake_audit_doc(user=None) self.request.audit_user = "audit_user" with configured_middleware() as ware: ware(self.request) self.assertEqual(audit_doc.status_code, 200) self.assertEqual(audit_doc.user, "audit_user") self.assertEqual(audit_doc.save.count, 1) def test_process_response_with_audit_doc_and_couch_user(self): self.request.audit_doc = audit_doc = fake_audit_doc(user=None) self.request.couch_user = Config(username="couch_user") with configured_middleware() as ware: ware(self.request) self.assertEqual(audit_doc.status_code, 200) self.assertEqual(audit_doc.user, "couch_user") self.assertEqual(audit_doc.save.count, 1) def assert_audit(self, request): audit_doc = getattr(request, "audit_doc", None) self.assertEqual(audit_doc, EXPECTED_AUDIT, "audit expected") def assert_no_audit(self, 
request): self.assertFalse(hasattr(request, "audit_doc"), "unexpected audit") def test_make_view_function(): func = make_view() eq(func.__name__, "the_view") eq(func.__module__, "corehq.apps.auditcare.views") def test_make_view_class(): func = make_view("TheView") eq(func.__class__.__name__, "TheView") eq(func.__module__, "corehq.apps.auditcare.views") def test_make_admin_view_function(): func = make_view("the_view", "django.contrib.admin") eq(func.__name__, "the_view") eq(func.__module__, "django.contrib.admin") def test_make_admin_view_class(): func = make_view("TheView", "django.contrib.admin") eq(func.__class__.__name__, "TheView") eq(func.__module__, "django.contrib.admin") ARGS = () # positional view args are not audited, therefore are empty KWARGS = {"non": "empty", "and": "audited", "view": "kwargs"} EXPECTED_AUDIT = Config(user="username", view_kwargs=KWARGS) Settings = Config( AUDIT_MODULES=default_settings.AUDIT_MODULES, AUDIT_VIEWS=default_settings.AUDIT_VIEWS, ) @contextmanager def configured_middleware(settings=Settings): response = Config(status_code=200) with patch.object(mod.NavigationEventAudit, "audit_view", fake_audit), \ patch.object(mod, "settings", settings): yield mod.AuditMiddleware(lambda request: response) def make_view(name="the_view", module="corehq.apps.auditcare.views"): is_class = name[0].isupper() if is_class: view_func = type(name, (), {})() else: def view_func(): assert False, "unexpected call" view_func.__name__ = name view_func.__module__ = module return view_func def fake_audit(request, user, view_func, view_kwargs, extra={}): return Config(user=user, view_kwargs=view_kwargs) def fake_audit_doc(**kwargs): def save(): save.count += 1 save.count = 0 return Config(save=save, **kwargs)
Python
0.000003
@@ -1260,35 +1260,37 @@ nc = make_view(%22 -The +Class View%22)%0A s @@ -1819,35 +1819,37 @@ nc = make_view(%22 -The +Class View%22, %22corehq.a @@ -4429,27 +4429,29 @@ make_view(%22 -The +Class View%22)%0A e @@ -4470,35 +4470,37 @@ ss__.__name__, %22 -The +Class View%22)%0A eq(fu @@ -4781,19 +4781,21 @@ e_view(%22 -The +Class View%22, %22 @@ -4854,11 +4854,13 @@ _, %22 -The +Class View
ad74ac75f6fea996a2ffb6efd62b0628ff8ccc3e
check if executor and loop are running before close
bndl/compute/executor.py
bndl/compute/executor.py
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from concurrent.futures import TimeoutError from urllib.parse import urlunparse import asyncio import atexit import logging import re import signal import sys from bndl.compute.blocks import BlockManager from bndl.compute.broadcast import BroadcastManager from bndl.compute.memory import LocalMemoryManager from bndl.compute.shuffle import ShuffleManager from bndl.compute.tasks import Tasks from bndl.net.connection import urlparse from bndl.rmi.node import RMINode from bndl.util.aio import get_loop, get_loop_thread, stop_loop, run_coroutine_threadsafe from bndl.util.threads import dump_threads logger = logging.getLogger(__name__) class Executor(RMINode): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.services['blocks'] = BlockManager(self) self.services['broadcast'] = BroadcastManager(self) self.services['shuffle'] = ShuffleManager(self) self.services['tasks'] = Tasks(self) self.memory_manager = LocalMemoryManager() @asyncio.coroutine def start(self): yield from super().start() self.memory_manager.start() @asyncio.coroutine def stop(self): self.memory_manager.stop() yield from super().stop() def _executor_address(worker_address, executor_id): worker_address = urlparse(worker_address) executor_address = list(worker_address) executor_port = worker_address.port + int(executor_id) executor_address[1] = re.sub(':\d+', ':%s' % (executor_port), executor_address[1]) executor_address = urlunparse(executor_address) return executor_address def main(): worker_address, executor_id = sys.argv[1:] executor_address = _executor_address(worker_address, executor_id) loop = get_loop(start=True) executor = Executor( node_type='executor', addresses=[executor_address], seeds=[worker_address], loop=loop, ) executor.start_async().result() @atexit.register def stop(*args): try: run_coroutine_threadsafe(executor.stop(), loop).result(1) except TimeoutError: pass stop_loop() def exit_handler(sig, frame): stop() signal.signal(signal.SIGINT, exit_handler) signal.signal(signal.SIGTERM, exit_handler) signal.signal(signal.SIGUSR1, dump_threads) get_loop_thread().join() if __name__ == '__main__': main()
Python
0.000001
@@ -2541,24 +2541,79 @@ top(*args):%0A + if executor.running and loop.is_running():%0A try: @@ -2613,16 +2613,20 @@ try:%0A + @@ -2695,16 +2695,20 @@ + except T @@ -2720,16 +2720,20 @@ tError:%0A +
83efc4d5ebf22e68529521c3c9198b8105943b19
Fix error with invalid team information and team size limits (#1220)
CTFd/teams.py
CTFd/teams.py
from flask import render_template, request, redirect, url_for, Blueprint from CTFd.models import db, Teams from CTFd.utils.decorators import authed_only, ratelimit from CTFd.utils.decorators.modes import require_team_mode from CTFd.utils import config, get_config from CTFd.utils.user import get_current_user from CTFd.utils.crypto import verify_password from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from CTFd.utils.helpers import get_errors, get_infos teams = Blueprint("teams", __name__) @teams.route("/teams") @check_account_visibility @require_team_mode def listing(): page = abs(request.args.get("page", 1, type=int)) results_per_page = 50 page_start = results_per_page * (page - 1) page_end = results_per_page * (page - 1) + results_per_page # TODO: Should teams confirm emails? # if get_config('verify_emails'): # count = Teams.query.filter_by(verified=True, banned=False).count() # teams = Teams.query.filter_by(verified=True, banned=False).slice(page_start, page_end).all() # else: count = Teams.query.filter_by(hidden=False, banned=False).count() teams = ( Teams.query.filter_by(hidden=False, banned=False) .slice(page_start, page_end) .all() ) pages = int(count / results_per_page) + (count % results_per_page > 0) return render_template("teams/teams.html", teams=teams, pages=pages, curr_page=page) @teams.route("/teams/join", methods=["GET", "POST"]) @authed_only @require_team_mode @ratelimit(method="POST", limit=10, interval=5) def join(): infos = get_infos() errors = get_errors() if request.method == "GET": team_size_limit = get_config("team_size", default=0) if team_size_limit: plural = "" if team_size_limit == 1 else "s" infos.append( "Teams are limited to {limit} member{plural}".format( limit=team_size_limit, plural=plural ) ) return render_template("teams/join_team.html", infos=infos, errors=errors) if request.method == "POST": teamname = request.form.get("name") passphrase = request.form.get("password", "").strip() team = Teams.query.filter_by(name=teamname).first() user = get_current_user() team_size_limit = get_config("team_size", default=0) if team_size_limit and len(team.members) >= team_size_limit: errors.append( "{name} has already reached the team size limit of {limit}".format( name=team.name, limit=team_size_limit ) ) return render_template("teams/join_team.html", infos=infos, errors=errors) if team and verify_password(passphrase, team.password): user.team_id = team.id db.session.commit() if len(team.members) == 1: team.captain_id = user.id db.session.commit() return redirect(url_for("challenges.listing")) else: errors.append("That information is incorrect") return render_template("teams/join_team.html", infos=infos, errors=errors) @teams.route("/teams/new", methods=["GET", "POST"]) @authed_only @require_team_mode def new(): infos = get_infos() errors = get_errors() if request.method == "GET": team_size_limit = get_config("team_size", default=0) if team_size_limit: plural = "" if team_size_limit == 1 else "s" infos.append( "Teams are limited to {limit} member{plural}".format( limit=team_size_limit, plural=plural ) ) return render_template("teams/new_team.html", infos=infos, errors=errors) elif request.method == "POST": teamname = request.form.get("name", "").strip() passphrase = request.form.get("password", "").strip() errors = get_errors() user = get_current_user() existing_team = Teams.query.filter_by(name=teamname).first() if existing_team: errors.append("That team name is already taken") if not teamname: errors.append("That 
team name is invalid") if errors: return render_template("teams/new_team.html", errors=errors) team = Teams(name=teamname, password=passphrase, captain_id=user.id) db.session.add(team) db.session.commit() user.team_id = team.id db.session.commit() return redirect(url_for("challenges.listing")) @teams.route("/team") @authed_only @require_team_mode def private(): user = get_current_user() if not user.team_id: return render_template("teams/team_enrollment.html") team_id = user.team_id team = Teams.query.filter_by(id=team_id).first_or_404() solves = team.get_solves() awards = team.get_awards() place = team.place score = team.score return render_template( "teams/private.html", solves=solves, awards=awards, user=user, team=team, score=score, place=place, score_frozen=config.is_scoreboard_frozen(), ) @teams.route("/teams/<int:team_id>") @check_account_visibility @check_score_visibility @require_team_mode def public(team_id): errors = get_errors() team = Teams.query.filter_by(id=team_id, banned=False, hidden=False).first_or_404() solves = team.get_solves() awards = team.get_awards() place = team.place score = team.score if errors: return render_template("teams/public.html", team=team, errors=errors) return render_template( "teams/public.html", solves=solves, awards=awards, team=team, score=score, place=place, score_frozen=config.is_scoreboard_frozen(), )
Python
0
@@ -2330,32 +2330,100 @@ current_user()%0A%0A + if team and verify_password(passphrase, team.password):%0A team_siz @@ -2467,32 +2467,36 @@ ault=0)%0A + + if team_size_lim @@ -2544,32 +2544,36 @@ it:%0A + errors.append(%0A @@ -2563,32 +2563,36 @@ errors.append(%0A + @@ -2679,16 +2679,20 @@ + name=tea @@ -2729,34 +2729,42 @@ -)%0A + )%0A )%0A @@ -2753,32 +2753,36 @@ )%0A + retu @@ -2857,72 +2857,8 @@ s)%0A%0A - if team and verify_password(passphrase, team.password):%0A
d310075ec946e3931de8e8b98fe32e5938c656d8
update version string (before too long we should have proper version tags).
caesure/connection.py
caesure/connection.py
# -*- Mode: Python -*- import re import random import struct import time import coro from caesure.bitcoin import dhash, network from caesure.ansi import * from caesure.proto import VERSION, pack_inv, unpack_version def make_nonce(): return random.randint (0, 1 << 64) ipv6_server_re = re.compile ('\[([A-Fa-f0-9:]+)\]:([0-9]+)') ipv4_server_re = re.compile ('([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+):([0-9]+)') def parse_addr_arg (addr): m = ipv4_server_re.match (addr) if not m: m = ipv6_server_re.match (addr) if not m: raise ValueError ("bad server address: %r" % (addr,)) ip0, port0 = m.groups() port0 = int (port0) addr0 = (ip0, port0) return addr0 class BaseConnection: # Note: when you derive from this class you may want to tweak # the protocol version depending on what features you will provide or expect. # protocol version version = 70001 # software version version_string = '/caesure:201411118/' # relay flag (see bip37 for details...) relay = False def __init__ (self, my_addr, other_addr, conn=None, log_fun=None, verbose=False, packet=False): self.log_fun = log_fun self.verbose = verbose self.packet = packet self.my_addr = my_addr self.other_addr = other_addr self.nonce = make_nonce() self.other_version = None self.send_mutex = coro.mutex() if conn is None: if ':' in other_addr[0]: self.conn = coro.tcp6_sock() else: self.conn = coro.tcp_sock() else: self.conn = conn self.packet_count = 0 coro.spawn (self.go) def log (self, *args): if self.log_fun is not None: self.log_fun (*args) def connect (self): self.log ('connect', self.other_addr) self.conn.connect (self.other_addr) def send_packet (self, command, payload): with self.send_mutex: lc = len(command) assert (lc < 12) cmd = command + ('\x00' * (12 - lc)) h = dhash (payload) checksum, = struct.unpack ('<I', h[:4]) self.conn.writev ([ network.MAGIC, cmd, struct.pack ('<II', len(payload), checksum), payload ]) if self.packet: self.log ('send', self.other_addr, command, payload) if self.verbose and command not in ('ping', 'pong'): WT (' ' + command) def get_our_block_height (self): return 0 def send_version (self): v = VERSION() v.version = self.version v.services = 1 v.timestamp = int(time.time()) v.you_addr = (1, self.other_addr) v.me_addr = (1, self.my_addr) v.nonce = self.nonce v.sub_version_num = self.version_string start_height = self.get_our_block_height() if start_height < 0: start_height = 0 v.start_height = start_height v.relay = self.relay self.send_packet ('version', v.pack()) def getdata (self, items): "request (TX|BLOCK)+ from the other side" # note: pack_getdata == pack_inv self.send_packet ('getdata', pack_inv (items)) def get_packet (self, timeout=1800): data = coro.with_timeout (timeout, self.conn.recv_exact, 24) if not data: self.log ('closed', self.other_addr) return None, None magic, command, length, checksum = struct.unpack ('<I12sII', data) command = command.strip ('\x00') if self.verbose and command not in ('ping', 'pong'): WF (' ' + command) self.packet_count += 1 self.header = magic, command, length if length: payload = coro.with_timeout (30, self.conn.recv_exact, length) else: payload = '' if self.packet: self.log ('recv', self.other_addr, command, payload) return (command, payload) # please see server.py:Connection for a more complete version # of incoming packet processing. 
def go (self): try: try: coro.with_timeout (30, self.connect) self.send_version() while 1: command, payload = self.get_packet() if command is None: break self.do_command (command, payload) except (OSError, EOFError, coro.TimeoutError): pass finally: self.conn.close() def check_command_name (self, command): return re.match ('^[A-Za-z]+$', command) is not None def do_command (self, cmd, data): if self.check_command_name (cmd): method = getattr (self, 'cmd_%s' % cmd,) method (data) else: W ('connection: bad command %r\n' % (cmd,)) def cmd_version (self, data): self.other_version = unpack_version (data) self.send_packet ('verack', '') def cmd_verack (self, data): pass def cmd_ping (self, data): self.send_packet ('pong', data) def cmd_pong (self, data): pass
Python
0
@@ -980,12 +980,11 @@ 0141 -1118 +203 /'%0A
d6cfc95c436b7eb4be372795948a8f9097d60015
Remove unused import
astropy_helpers/sphinx/ext/__init__.py
astropy_helpers/sphinx/ext/__init__.py
from __future__ import division, absolute_import, print_function from .numpydoc import setup
Python
0.000001
@@ -63,32 +63,4 @@ on%0A%0A -from .numpydoc import setup%0A
abb870fff0a4bdad84486f7786654604ee23d6ce
allow for Decimal and other types not inherently addable to float in SMA calculator.
cubes/statutils.py
cubes/statutils.py
from collections import deque from cubes.model import Attribute def _wma(values): n = len(values) denom = n * (n + 1) / 2 total = 0.0 idx = 1 for val in values: total += float(idx) * float(val) idx += 1 return round(total / denom, 4) def _sma(values): # use all the values return round(reduce(lambda i, c: c + i, values, 0.0) / len(values), 2) def weighted_moving_average_factory(measure, drilldown_paths, source_aggregations): return _moving_average_factory(measure, drilldown_paths, source_aggregations, _wma, 'wma') def simple_moving_average_factory(measure, drilldown_paths, source_aggregations): return _moving_average_factory(measure, drilldown_paths, source_aggregations, _sma, 'sma') def _moving_average_factory(measure, drilldown_paths, source_aggregations, avg_func, aggregation_name): if not drilldown_paths or not source_aggregations: return lambda item: None # if the level we're drilling to doesn't have aggregation_units configured, # we're not doing any calculations key_drilldown_paths = [] num_units = None for path in drilldown_paths: relevant_level = path[2][-1] these_num_units = None if relevant_level.info: these_num_units = relevant_level.info.get('aggregation_units', None) if these_num_units is None: key_drilldown_paths.append(path) else: num_units = these_num_units if num_units is None or not isinstance(num_units, int) or num_units < 2: return lambda item: None # if no key_drilldown_paths, the key is always the empty tuple. def key_extractor(item): vals = [] for dim, hier, levels in key_drilldown_paths: for level in levels: vals.append( item.get(level.key.ref()) ) return tuple(vals) calculators = [] measure_baseref = measure.ref() for agg in source_aggregations: if agg != "identity": measure_ref = measure_baseref + "_" + agg else: measure_ref = measure_baseref calculators.append( _calc_func(measure_ref + "_" + aggregation_name, measure_ref, avg_func, key_extractor, num_units) ) def calculator(item): for calc in calculators: calc(item) return calculator def _calc_func(field_name, measure_ref, avg_func, key_extractor, num_units): by_value_map = {} def f(item): by_value = key_extractor(item) val_list = by_value_map.get(by_value) if val_list is None: val_list = deque() by_value_map[by_value] = val_list val = item.get(measure_ref) if val is not None: val_list.append(val) while len(val_list) > num_units: val_list.popleft() if len(val_list) > 0: item[field_name] = avg_func(val_list) return f
Python
0
@@ -349,17 +349,24 @@ a i, c: -c +float(c) + i, va
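Decoded, the patch wraps each element in float() inside the reduce, so the running sum accepts Decimal and any other float-convertible type instead of raising TypeError on "float + Decimal". A minimal sketch of the fixed helper (the functools import is only needed on Python 3; the original file targets Python 2, where reduce is a builtin):

from decimal import Decimal
from functools import reduce  # builtin on Python 2; imported here for Python 3

def _sma(values):
    # float(c) coerces Decimal (or any float-convertible value) before the
    # addition, so "c + i" no longer fails on mixed numeric types.
    return round(reduce(lambda i, c: float(c) + i, values, 0.0) / len(values), 2)

print(_sma([Decimal("1.5"), Decimal("2.5"), 3]))  # 2.33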
57a913f2808ce1525746714e4284355df0510d03
FIX simpler fix of BS4 bug with tags
lyricstagger/helpers/wikia.py
lyricstagger/helpers/wikia.py
""" Helper to download lyrics from lyrics.wikia.com """ from __future__ import unicode_literals import re import requests from bs4 import BeautifulSoup, NavigableString, Tag import logging class Wikia: """Lyrics Downloader for lyrics.wikia.com""" url = "http://lyrics.wikia.com/api.php" def __init__(self): pass @staticmethod def parse(text, gracenote): """Parse lyrics from html""" # parse the result soup = BeautifulSoup(text) lyricbox = soup.find('div', "lyricbox") if lyricbox is None: logging.debug("BeautifulSoup doesn't find content") return None if gracenote: # gracenote lyrics are in a separate paragraph lyricbox = lyricbox.find('p') lyrics = '' for content in lyricbox.contents: if type(content) == NavigableString: lyrics += content.strip() elif type(content) == Tag: if content.string and content.name == "b" and content.string.startswith("Instrumental"): return '{{Instrumental}}' elif content.name == "br": lyrics += '\n' if content.name in ['br', 'b', 'span'] and content.string: lyrics += content.string.strip() return lyrics.strip() @staticmethod def get_raw_data(artist, song): """Download html with lyrics, return None or tuple (text, gracenote)""" if not artist or not song: return None # wikia needs both informations payload = {'artist': artist, 'song': song, 'fmt': "json"} result = requests.get(Wikia.url, params=payload) if result.status_code != 200: return None # The api returns a pseudo json object, that contains a url. match = re.search("'url':'([^']+)'", result.text) if match is None: return None html_url = match.group(1) logging.debug('fetch url %s', html_url) if 'action=edit' in html_url: logging.debug("no lyrics found") return None result = requests.get(html_url) gracenote = False if result.status_code != 200: # try it also with Gracenote: (e.g. Glen Hansard - High Hope) html_url = html_url[:9] + \ html_url[9:].replace('/', '/Gracenote:', 1) logging.debug('fetch url %s', html_url) result = requests.get(html_url) gracenote = True if result.status_code != 200: return None return (result.text, gracenote) @staticmethod def fetch(artist, song, _): """Fetch lyrics from remote url""" data = Wikia.get_raw_data(artist, song) if data: return Wikia.parse(data[0], data[1]) return None
Python
0
@@ -1225,28 +1225,23 @@ ame +not in %5B' -br', 'b', 'span +script '%5D a
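The fix replaces the tag whitelist ['br', 'b', 'span'] with a single blacklist check, so text from any tag except <script> is kept. A condensed, runnable sketch of the patched loop (the Instrumental short-circuit from the original is omitted for brevity; the sample HTML is made up):

from bs4 import BeautifulSoup, NavigableString, Tag

def collect_lyrics(lyricbox):
    lyrics = ''
    for content in lyricbox.contents:
        if type(content) == NavigableString:
            lyrics += content.strip()
        elif type(content) == Tag:
            if content.name == "br":
                lyrics += '\n'
            # Blacklist instead of whitelist: keep every tag's text
            # except <script>, whose contents are never lyrics.
            if content.name not in ['script'] and content.string:
                lyrics += content.string.strip()
    return lyrics.strip()

box = BeautifulSoup("<div><i>word</i><br/><script>x</script></div>", "html.parser").div
print(collect_lyrics(box))  # "word"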
071661e2ffa39440a57c16b2b7f7ffb7d05845d5
check SUNLIGHT_KEY in addition to SUNLIGHT_API_KEY
call_server/config.py
call_server/config.py
import os import twilio.rest import sunlight class DefaultConfig(object): PROJECT = 'CallPower' DEBUG = False TESTING = False ENVIRONMENT = "Default" APP_NAME = "call_server" APPLICATION_ROOT = None # the path where the application is configured SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI', 'sqlite:////%s/dev.db' % os.path.abspath(os.curdir)) SQLALCHEMY_ECHO = False BABEL_DEFAULT_LOCALE = 'en' BABEL_DEFAULT_TIMEZONE = 'UTC' ACCEPT_LANGUAGES = {'en': 'English', 'es': 'Spanish'} CACHE_TYPE = 'simple' CACHE_THRESHOLD = 100000 # because we're caching political data CACHE_DEFAULT_TIMEOUT = 60*60*24*365*2 # there's no infinite timeout, so default to 2 year election cycle CSRF_ENABLED = False INSTALLED_ORG = os.environ.get('INSTALLED_ORG') SITENAME = os.environ.get('SITENAME', PROJECT) STORE_PROVIDER = 'flask_store.providers.local.LocalProvider' STORE_DOMAIN = 'http://localhost:5000' # requires url scheme for Flask-store.absolute_url to work TWILIO_CLIENT = twilio.rest.TwilioRestClient( os.environ.get('TWILIO_ACCOUNT_SID'), os.environ.get('TWILIO_AUTH_TOKEN')) TWILIO_PLAYBACK_APP = os.environ.get('TWILIO_PLAYBACK_APP') # limit on the length of the call TWILIO_TIME_LIMIT = os.environ.get('TWILIO_TIME_LIMIT', 60 * 60) # one hour max # limit on the amount of time to ring before giving up TWILIO_TIMEOUT = os.environ.get('TWILIO_TIMEOUT', 60) # seconds SECRET_KEY = os.environ.get('SECRET_KEY') SUNLIGHT_API_KEY = os.environ.get('SUNLIGHT_API_KEY') sunlight.config.API_KEY = SUNLIGHT_API_KEY LOG_PHONE_NUMBERS = True MAIL_SERVER = 'localhost' class ProductionConfig(DefaultConfig): DEBUG = False ENVIRONMENT = "Production" SERVER_NAME = os.environ.get('SERVER_NAME') APPLICATION_ROOT = os.environ.get('APPLICATION_ROOT', None) ADMIN_API_KEY = os.environ.get('ADMIN_API_KEY', None) CACHE_TYPE = 'redis' CACHE_REDIS_URL = os.environ.get('REDIS_URL') CACHE_KEY_PREFIX = 'call-power' LOG_PHONE_NUMBERS = os.environ.get('LOG_PHONE_NUMBERS', False) OUTPUT_LOG = os.environ.get('OUTPUT_LOG', False) MAIL_SERVER = os.environ.get('MAIL_SERVER', 'localhost') MAIL_PORT = os.environ.get('MAIL_PORT', 25) MAIL_USERNAME = os.environ.get('MAIL_USERNAME') MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD') MAIL_DEFAULT_SENDER = os.environ.get('MAIL_DEFAULT_SENDER') MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS', True) SQLALCHEMY_POOL_RECYCLE = 60 * 60 # 1 hour SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI') STORE_PROVIDER = 'flask_store.providers.s3.S3Provider' # TODO, change to S3GeventProvider when we re-enable gevent STORE_PATH = 'uploads' STORE_S3_BUCKET = os.environ.get('STORE_S3_BUCKET') STORE_S3_REGION = os.environ.get('STORE_S3_REGION') STORE_S3_ACCESS_KEY = os.environ.get('S3_ACCESS_KEY') STORE_S3_SECRET_KEY = os.environ.get('S3_SECRET_KEY') STORE_DOMAIN = 'https://%s.s3-%s.amazonaws.com/' % (STORE_S3_BUCKET, STORE_S3_REGION) class HerokuConfig(ProductionConfig): # Heroku addons use a few different environment variable names ENVIRONMENT = "Heroku" # db via heroku postgres SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') # cache via heroku-redis CACHE_TYPE = 'redis' CACHE_REDIS_URL = os.environ.get('REDIS_URL') CACHE_KEY_PREFIX = 'call-power' # smtp via sendgrid MAIL_SERVER = 'smtp.sendgrid.net' MAIL_PORT = 587 MAIL_USERNAME = os.environ.get('SENDGRID_USERNAME') MAIL_PASSWORD = os.environ.get('SENDGRID_PASSWORD') MAIL_DEFAULT_SENDER = os.environ.get('MAIL_DEFAULT_SENDER', 'info@callpower.org') class DevelopmentConfig(DefaultConfig): DEBUG = True DEBUG_INFO = False TESTING = False ENVIRONMENT = 
"Development" ADMIN_API_KEY = os.environ.get('ADMIN_API_KEY', 'ThisIsATestAdminAPIKey!') WTF_CSRF_ENABLED = True DEBUG_TB_INTERCEPT_REDIRECTS = False SECRET_KEY = os.environ.get('SECRET_KEY', 'NotARealSecretKey,YouShouldSetOneInYour.Env') SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI', 'sqlite:////%s/dev.db' % os.path.abspath(os.curdir)) SERVER_NAME = 'localhost:5000' STORE_PATH = '%s/instance/uploads/' % os.path.abspath(os.curdir) STORE_DOMAIN = 'http://localhost:5000' MAIL_DEBUG = True MAIL_PORT = 1025 MAIL_DEFAULT_SENDER = 'debug' class TestingConfig(DefaultConfig): ENVIRONMENT = "Testing" TESTING = True WTF_CSRF_ENABLED = False SQLALCHEMY_DATABASE_URI = 'sqlite://' # keep testing db in memory CACHE_TYPE = 'null' CACHE_NO_NULL_WARNING = True
Python
0.000013
@@ -1632,24 +1632,111 @@ T_API_KEY')%0A + if not SUNLIGHT_API_KEY:%0A SUNLIGHT_API_KEY = os.environ.get('SUNLIGHT_KEY')%0A sunlight
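Decoded, the change adds a fallback lookup: if SUNLIGHT_API_KEY is unset, the alternate SUNLIGHT_KEY environment variable is tried before the key is handed to the sunlight client. A sketch of the patched lines (in the real file they sit inside the DefaultConfig class body):

import os

SUNLIGHT_API_KEY = os.environ.get('SUNLIGHT_API_KEY')
if not SUNLIGHT_API_KEY:
    # Some deployments export the key under the shorter variable name.
    SUNLIGHT_API_KEY = os.environ.get('SUNLIGHT_KEY')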
add62d6fddb5e71ce565ddbbb0da33098c311cef
Add ImageMetrics to the -multimedia channel
channels.py
channels.py
channels = { "#huggle": lambda x: x.get("X-Bugzilla-Product", None) == "Huggle", "#pywikibot": lambda x: x.get("X-Bugzilla-Product", None) == "Pywikibot", "#wikimedia-corefeatures": lambda x: (x.get("X-Bugzilla-Product", None) == "MediaWiki extensions") and \ (x.get("X-Bugzilla-Component", None) in ["Echo", "Flow", "PageCuration", "Thanks", "WikiLove"]), "#mediawiki-i18n": lambda x: (x.get("X-Bugzilla-Component", None) in ["ContentTranslation"]), "#wikimedia-labs": lambda x: x.get("X-Bugzilla-Product", None) in ["Tool Labs tools", "Wikimedia Labs"] or \ ( (x.get("X-Bugzilla-Product", None) == "MediaWiki extensions") and \ (x.get("X-Bugzilla-Component", None) in ["OpenStackManager"]) ), "#wikimedia-mobile": lambda x: x.get("X-Bugzilla-Product", None) in ["Wikimedia Mobile", "Commons App", "Wikipedia App", "MobileFrontend"], "#wikimedia-qa": lambda x: ( (x.get("X-Bugzilla-Product", None) == "Wikimedia") and \ (x.get("X-Bugzilla-Component", None) in ["Continuous integration", "Quality Assurance"]) ) or \ ( (x.get("X-Bugzilla-Product", None) == "Wikimedia Labs") and \ (x.get("X-Bugzilla-Component", None) == "deployment-prep (beta)") ), "#mediawiki-visualeditor": lambda x: x.get("X-Bugzilla-Product", None) in ["VisualEditor", "OOjs", "OOjs UI"] or \ ( (x.get("X-Bugzilla-Product", None) == "MediaWiki extensions") and \ (x.get("X-Bugzilla-Component", None) in ["TemplateData", "Cite", "WikiEditor"]) ) or \ ( (x.get("X-Bugzilla-Product", None) == "MediaWiki") and \ (x.get("X-Bugzilla-Component", None) in ["Page editing", "ResourceLoader"]) ), "#mediawiki-parsoid": lambda x: x.get("X-Bugzilla-Product", None) in ["Parsoid"], "#wikimedia-multimedia": lambda x: \ ( x.get("X-Bugzilla-Product", None) in ["MediaWiki extensions"] and x.get("X-Bugzilla-Component", None) in ["UploadWizard", "TimedMediaHandler", "VipsScaler", "GlobalUsage", "MultimediaViewer", "GWToolset", "Score", "PagedTiffHandler", "PdfHandler", "ImageMap", "CommonsMetadata", "OggHandler"] ) or \ ( x.get("X-Bugzilla-Product", None) in ["MediaWiki"] and x.get("X-Bugzilla-Component", None) in ["File management", "Uploading"] ), "#wikimedia-growth": lambda x: ( x.get("X-Bugzilla-Product", None) in ["MediaWiki extensions"] and x.get("X-Bugzilla-Component", None) in ["GuidedTour", "GettingStarted"] ), "#wikimedia-analytics": lambda x: x.get("X-Bugzilla-Product", None) == "Analytics", # The following changes should ALWAYS be in #wikimedia-dev, even if the bugs # are also reported elsewhere. "#wikimedia-dev": lambda x: x.get("X-Bugzilla-Product", None) == "MediaWiki" } default_channel = "#wikimedia-dev" firehose_channel = "#mediawiki-feed"
Python
0.000005
@@ -2564,16 +2564,53 @@ Handler%22 +,%0A %22ImageMetrics%22 %5D%0A
f0dff35e18599061ec5917137ebc18310ae9ffd1
add Indenter vs keepWithNext breakage found by Ilpo Nyyssönen
reportlab/test/test_platypus_breaking.py
reportlab/test/test_platypus_breaking.py
#Copyright ReportLab Europe Ltd. 2000-2004 #see license.txt for license details #history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/test/test_platypus_breaking.py """Tests pageBreakBefore, frameBreakBefore, keepWithNext... """ import sys, os, time from string import split, strip, join, whitespace from operator import truth from types import StringType, ListType from reportlab.test import unittest from reportlab.test.utils import makeSuiteForClasses, outputfile, printLocation from reportlab.platypus.flowables import Flowable from reportlab.lib import colors from reportlab.lib.units import cm from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle from reportlab.platypus.paragraph import Paragraph from reportlab.platypus.frames import Frame from reportlab.lib.randomtext import randomText, PYTHON from reportlab.platypus.doctemplate import PageTemplate, BaseDocTemplate from reportlab.platypus.paragraph import * def myMainPageFrame(canvas, doc): "The page frame used for all PDF documents." canvas.saveState() canvas.setFont('Times-Roman', 12) pageNumber = canvas.getPageNumber() canvas.drawString(10*cm, cm, str(pageNumber)) canvas.restoreState() class MyDocTemplate(BaseDocTemplate): _invalidInitArgs = ('pageTemplates',) def __init__(self, filename, **kw): frame1 = Frame(2.5*cm, 15.5*cm, 6*cm, 10*cm, id='F1') frame2 = Frame(11.5*cm, 15.5*cm, 6*cm, 10*cm, id='F2') frame3 = Frame(2.5*cm, 2.5*cm, 6*cm, 10*cm, id='F3') frame4 = Frame(11.5*cm, 2.5*cm, 6*cm, 10*cm, id='F4') self.allowSplitting = 0 self.showBoundary = 1 apply(BaseDocTemplate.__init__, (self, filename), kw) template = PageTemplate('normal', [frame1, frame2, frame3, frame4], myMainPageFrame) self.addPageTemplates(template) def _test0(self): "This makes one long multi-page paragraph." # Build story. story = [] styleSheet = getSampleStyleSheet() h1 = styleSheet['Heading1'] h1.pageBreakBefore = 1 h1.keepWithNext = 1 h2 = styleSheet['Heading2'] h2.frameBreakBefore = 1 h2.keepWithNext = 1 h3 = styleSheet['Heading3'] h3.backColor = colors.cyan h3.keepWithNext = 1 bt = styleSheet['BodyText'] story.append(Paragraph(""" Subsequent pages test pageBreakBefore, frameBreakBefore and keepTogether attributes. Generated at %s. The number in brackets at the end of each paragraph is its position in the story. (%d)""" % ( time.ctime(time.time()), len(story)), bt)) for i in range(10): story.append(Paragraph('Heading 1 always starts a new page (%d)' % len(story), h1)) for j in range(3): story.append(Paragraph('Heading1 paragraphs should always' 'have a page break before. Heading 2 on the other hand' 'should always have a FRAME break before (%d)' % len(story), bt)) story.append(Paragraph('Heading 2 always starts a new frame (%d)' % len(story), h2)) story.append(Paragraph('Heading1 paragraphs should always' 'have a page break before. Heading 2 on the other hand' 'should always have a FRAME break before (%d)' % len(story), bt)) for j in range(3): story.append(Paragraph(randomText(theme=PYTHON, sentences=2)+' (%d)' % len(story), bt)) story.append(Paragraph('I should never be at the bottom of a frame (%d)' % len(story), h3)) story.append(Paragraph(randomText(theme=PYTHON, sentences=1)+' (%d)' % len(story), bt)) doc = MyDocTemplate(outputfile('test_platypus_breaking.pdf')) doc.multiBuild(story) class BreakingTestCase(unittest.TestCase): "Test multi-page splitting of paragraphs (eyeball-test)." 
def test0(self): _test0(self) def makeSuite(): return makeSuiteForClasses(BreakingTestCase) #noruntests if __name__ == "__main__": #NORUNTESTS if 'debug' in sys.argv: _test0(None) else: unittest.TextTestRunner().run(makeSuite()) printLocation()
Python
0
@@ -996,16 +996,45 @@ Template +, Indenter, SimpleDocTemplate %0Afrom re @@ -4032,16 +4032,478 @@ (self)%0A%0A + def test1(self):%0A '''Ilpo Nyyssönen posted this broken test'''%0A normalStyle = ParagraphStyle(name = 'normal')%0A keepStyle = ParagraphStyle(name = 'keep', keepWithNext = True)%0A content = %5B%0A Paragraph(%22line 1%22, keepStyle),%0A Indenter(left = 1 * cm),%0A Paragraph(%22line 2%22, normalStyle),%0A %5D%0A doc = SimpleDocTemplate(outputfile('test_platypus_breaking1.pdf'))%0A doc.build(content)%0A%0A %0Adef mak
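Rendered back into source form, the diff adds two imports (Indenter, SimpleDocTemplate) and this regression test — keepWithNext immediately followed by an Indenter, the combination reported as broken. The method below is a literal decoding of the diff payload, so it belongs inside BreakingTestCase:

def test1(self):
    '''Ilpo Nyyssönen posted this broken test'''
    normalStyle = ParagraphStyle(name = 'normal')
    keepStyle = ParagraphStyle(name = 'keep', keepWithNext = True)
    content = [
        Paragraph("line 1", keepStyle),
        Indenter(left = 1 * cm),
        Paragraph("line 2", normalStyle),
    ]
    doc = SimpleDocTemplate(outputfile('test_platypus_breaking1.pdf'))
    doc.build(content)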
1a8f9212c91a11af2ffb8fbebf86e1ac5d8f4a4c
Fix docstring
batchflow/models/tf/layers/drop_block.py
batchflow/models/tf/layers/drop_block.py
""" Golnaz Ghiasi, Tsung-Yi Lin, Quoc V. Le "`DropBlock: A regularization method for convolutional networks <https://arxiv.org/abs/1810.12890>`_" """ import tensorflow as tf from .pooling import max_pooling # TODO: # When max_pooling allows for dynamic kernel size, implement block_size as fraction # of spatial_dims. # Write predefined callables to control dropout_rate def dropblock(inputs, dropout_rate, block_size, is_training, data_format, global_step=None, seed=None, **kwargs): """ Drop Block module. Parameters ---------- inputs : tf.Tensor Input tensor dropout_rate : float, tf.Tensor or callable. Default is 0 block_size : int or float or tuple of ints or floats Size of the block to drop. If tuple, should be of the same size as spatial dimensions of inputs. If float < 0, block_size is calculated as a fraction of corresponding spatial dimension. is_training : bool or tf.Tensor Default is True. data_format : str `channels_last` or `channels_first`. Default - 'channels_last'. global_step: misc If `dropout_rate` is callable, and `global_step` is passed to it as the first positional argument. seed : int seed to use in tf.distributions.Bernoulli.sample method. Returns ------- tf.Tensor """ if callable(dropout_rate): dropout_rate = dropout_rate(global_step, **kwargs) return tf.cond(tf.logical_or(tf.logical_not(is_training), tf.equal(dropout_rate, 0.0)), true_fn=lambda: inputs, false_fn=lambda: _dropblock(inputs, dropout_rate, block_size, seed, data_format), name='dropblock') def _dropblock(inputs, dropout_rate, block_size, seed, data_format): """ """ one = tf.convert_to_tensor([1], dtype=tf.int32) zeros_pad = tf.convert_to_tensor([[0, 0]], dtype=tf.int32) input_shape = tf.shape(inputs) if data_format == 'channels_first': spatial_dims, channels = input_shape[2:], input_shape[1:2] else: spatial_dims, channels = input_shape[1:-1], input_shape[-1:] spatial_ndim = spatial_dims.get_shape().as_list()[0] if isinstance(block_size, int): block_size = [block_size] * spatial_ndim block_size_tf = tf.convert_to_tensor(block_size) elif isinstance(block_size, tuple): if len(block_size) != spatial_ndim: raise ValueError('Length of `block_size` should be the same as spatial dimensions of input.') block_size_tf = tf.convert_to_tensor(block_size, dtype=tf.int32) else: raise ValueError('block_size should be int or tuple!') block_size_tf = tf.math.minimum(block_size_tf, spatial_dims) block_size_tf = tf.math.maximum(block_size_tf, one) spatial_dims_float = tf.cast(spatial_dims, dtype=tf.float32) block_size_tf_float = tf.cast(block_size_tf, dtype=tf.float32) inner_area = spatial_dims - block_size_tf + one inner_area_float = tf.cast(inner_area, dtype=tf.float32) gamma = (tf.convert_to_tensor(dropout_rate) * tf.math.reduce_prod(spatial_dims_float) / tf.math.reduce_prod(block_size_tf_float) / tf.math.reduce_prod(inner_area_float)) # Mask is sampled for each featuremap independently and applied identically to all batch items noise_dist = tf.distributions.Bernoulli(probs=gamma, dtype=tf.float32) if data_format == 'channels_first': sampling_mask_shape = tf.concat((one, channels, inner_area), axis=0) else: sampling_mask_shape = tf.concat((one, inner_area, channels), axis=0) mask = noise_dist.sample(sampling_mask_shape, seed=seed) left_spatial_pad = (block_size_tf - one) // 2 right_spatial_pad = block_size_tf - one - left_spatial_pad spatial_pads = tf.stack((left_spatial_pad, right_spatial_pad), axis=1) if data_format == 'channels_first': pad_shape = tf.concat((zeros_pad, zeros_pad, spatial_pads), axis=0) else: pad_shape = 
tf.concat((zeros_pad, spatial_pads, zeros_pad), axis=0) mask = tf.pad(mask, pad_shape) # Using max pool operation to extend sampled points to blocks of desired size pool_size = block_size strides = [1] * spatial_ndim mask = max_pooling(mask, pool_size=pool_size, strides=strides, data_format=data_format, padding='same') mask = tf.cast(1 - mask, tf.float32) output = tf.multiply(inputs, mask) # Scaling the output as in inverted dropout output = output * tf.to_float(tf.size(mask)) / tf.reduce_sum(mask) return output
Python
0.00003
@@ -578,25 +578,26 @@ Input tensor +. %0A - dropout_ @@ -682,17 +682,8 @@ or -float or tupl @@ -695,18 +695,8 @@ ints - or floats %0A @@ -809,113 +809,8 @@ ts.%0A - If float %3C 0, block_size is calculated as a fraction of corresponding spatial%0A dimension.%0A @@ -1116,25 +1116,25 @@ int%0A -s +S eed to use i
b0f0ec664d1a74e71d8ab9192a2eca0b29de07e3
version bump to 1.0.0
moviepy/version.py
moviepy/version.py
__version__ = "0.2.4.0"
Python
0
@@ -12,13 +12,11 @@ = %22 -0.2.4 +1.0 .0%22%0A
403c9fe403a722449ff22d4ac989db00cff4010e
fix AdminExportMixin
leprikon/admin/export.py
leprikon/admin/export.py
import csv from datetime import datetime from functools import partial import django_excel from django.http import HttpResponse from django.utils.encoding import force_text from django.utils.translation import ugettext_lazy as _ def lookup_attr(obj, name): for n in name.split('__'): obj = getattr(obj, n) if callable(obj): obj = obj() return obj class AdminExportMixin: actions = ('export_as_xlsx', 'export_as_csv') def get_list_export(self, request): try: return self.list_export except AttributeError: return self.list_display def get_export_fields(self, request): fields = [] for name in self.get_list_export(request): try: names = name.split('__') field = self.model._meta.get_field(names[0]) for n in names[1:]: field = field.related.model._meta.get_field(n) fields.append({ 'name': field.name, 'verbose_name': field.verbose_name, 'get_value': partial(lambda name, obj: lookup_attr(obj, name), name), }) except Exception: if callable(name): fields.append({ 'name': name.__func__.__name__, 'verbose_name': getattr(name, 'short_description', name.__func__.__name__), 'get_value': partial(lambda name, obj: name(obj), name), }) elif hasattr(self, name): attr = getattr(self, name) fields.append({ 'name': name, 'verbose_name': getattr(attr, 'short_description', name), 'get_value': partial(lambda attr, obj: attr(obj), attr), }) elif hasattr(self.model, name): attr = getattr(self.model, name) fields.append({ 'name': name, 'verbose_name': getattr(attr, 'short_description', name), 'get_value': partial(lambda name, obj: lookup_attr(obj, name), name), }) else: raise Exception('Can not resolve name "{}"'.format(name)) return fields def get_export_data(self, request, queryset): fields = self.get_export_fields(request) yield [force_text(f['verbose_name']) for f in fields] for obj in queryset.all(): values = [] for field in fields: value = field['get_value'](obj) if value is None: value = '' if isinstance(value, datetime): value = value.replace(tzinfo=None) else: # what can not be converted to float, must be converted to string try: float(value) except ValueError: value = force_text(value) values.append(value) yield values def export_as_csv(self, request, queryset): response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(self.model._meta.model_name) data = self.get_export_data(request, queryset) # write data to response (use all to evaluate the map generator) csv.writer(response).writerows(data) return response export_as_csv.short_description = _('Export selected records as CSV') def export_as_xlsx(self, request, queryset): data = self.get_export_data(request, queryset) response = django_excel.make_response(django_excel.pe.Sheet(data), 'xlsx') response['Content-Disposition'] = 'attachment; filename="{}.xlsx"'.format(self.model._meta.model_name) return response export_as_xlsx.short_description = _('Export selected records as XLSX')
Python
0
@@ -3058,16 +3058,28 @@ except + (TypeError, ValueEr @@ -3077,24 +3077,25 @@ , ValueError +) :%0A
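The fix widens the except clause in get_export_data: float(None) and float(datetime) raise TypeError, not ValueError, so those cells previously escaped the fallback and crashed the export. A minimal standalone sketch of the coercion with the widened clause (str stands in for Django's force_text):

from datetime import date

def coerce_cell(value):
    # Whatever cannot be converted to float is exported as text;
    # TypeError covers None and date/datetime, ValueError covers strings.
    try:
        float(value)
    except (TypeError, ValueError):
        value = str(value)
    return value

print(coerce_cell(None), coerce_cell(date(2020, 1, 1)), coerce_cell(3.5))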
bd45223f8606948936d2c0fa1c104a0c2f13d630
Update 8x8 generator
test/environments/instances/8x8/gen.py
test/environments/instances/8x8/gen.py
#!/usr/bin/python import random import os import errno for i in range(100): s=set() g=set() while len(s) < 20: s.add((random.randint(0,7),random.randint(0,7))) while len(g) < 20: g.add((random.randint(0,7),random.randint(0,7))) start=list(s) goal=list(g) for size in range(2,22,2): if not os.path.exists("./%d"%size): try: os.makedirs("./%d"%size) except OSError as exc: if exc.errno != errno.EEXIST: raise with open("./%d/%d.csv"%(size,i), "w") as f: for j in range(size): f.write("%d,%d %d,%d\n"%(start[j][0],start[j][1],goal[j][0],goal[j][1]))
Python
0
@@ -296,14 +296,12 @@ nge( -2,22 +1 ,2 +1 ):%0A
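The edit changes the size loop from range(2, 22, 2) to range(1, 21) — from even agent counts only to every count from 1 through 20:

old_sizes = list(range(2, 22, 2))  # [2, 4, ..., 20]: even sizes only
new_sizes = list(range(1, 21))     # [1, 2, ..., 20]: every size
assert set(old_sizes) < set(new_sizes)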
74e2b12d5ba576c63c8da86698fe93e9701a9d00
delete specific settings from parent app
organization/core/context_processors.py
organization/core/context_processors.py
# -*- coding: utf-8 -*- # # Copyright (c) 2016-2017 Ircam # Copyright (c) 2016-2017 Guillaume Pellerin # Copyright (c) 2016-2017 Emilie Zawadzki # This file is part of mezzanine-organization. # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from mezzanine.conf import settings # import the settings file from datetime import datetime, date from organization.pages.models import Page from organization.network.models import Organization, OrganizationLinkedInline, Person from mezzanine_agenda.models import Season from mezzanine.utils.sites import current_site_id from django.contrib.sites.models import Site def organization_settings(request): date_now = datetime.now() # SEASON current_season, created = Season.objects.get_or_create( start__year=date_now.year, defaults={'title' : 'Season ' + str(date_now.year) + '-' + str(date_now.year + 1), 'start' : date(date_now.year, settings.SEASON_START_MONTH, settings.SEASON_START_DAY), 'end' : date(date_now.year + 1, settings.SEASON_END_MONTH, settings.SEASON_END_DAY)}) current_season_styled = str(current_season.start.year)[-2:]+"."+str(current_season.end.year)[-2:] # NEWSLETTER newsletter_page = Page.objects.filter(slug="newsletter") newsletter_subscribing_url = "" if newsletter_page: newsletter_subscribing_url = newsletter_page.first().get_absolute_url() # HOST ORGANIZATIONS try: site = Site.objects.get(id=current_site_id()) host_org = Organization.objects.get(site=site) except: try: host_org = Organization.objects.filter(is_host=True).first() except: host_org = Organization.objects.first() organization_lists = [] if hasattr(host_org, 'organization_linked_block'): for orga_linked_block in host_org.organization_linked_block.all(): organizations = [] for orga_list in OrganizationLinkedInline.objects.filter(organization_list_id=orga_linked_block.organization_linked_id): organizations.append(orga_list.organization) organization_lists.append(organizations) linked_org_content = organization_lists[0] if len(organization_lists) > 0 else None linked_org_footer = organization_lists[1] if len(organization_lists) > 1 else None linked_org_footer_2 = organization_lists[2] if len(organization_lists) > 2 else None research_slug = "recherche" return {'current_season_year': current_season.start.year, 'current_season_styled': current_season_styled, 'newsletter_subscribing_url': newsletter_subscribing_url, 'host_organization': host_org, 'linked_organization_content' : linked_org_content, 'linked_organization_footer' : linked_org_footer, 'linked_organization_footer_2' : linked_org_footer_2, 'research_slug' : research_slug, 'menu_person_id': settings.MENU_PERSON_ID, 'debug_mode' : settings.DEBUG, 'http_host' : request.environ['HTTP_HOST'] if 'HTTP_HOST' in request.environ else '', 'hal_url' : settings.HAL_URL, 'team_page_id': settings.TEAM_PAGE_ID }
Python
0.000001
@@ -3764,58 +3764,8 @@ RL,%0A - 'team_page_id': settings.TEAM_PAGE_ID%0A
f1e52992e26a32dc76e933dfa8f5800542f8628f
Update parameter
sequence_prediction/sequence_generator/predict.py
sequence_prediction/sequence_generator/predict.py
#!/usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2015, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import operator import random import time from matplotlib import pyplot import numpy from nupic.data.inference_shifter import InferenceShifter from nupic.frameworks.opf.modelfactory import ModelFactory from nupic.research.monitor_mixin.trace import CountsTrace from sequence_generator import SequenceGenerator MIN_ORDER = 3 MAX_ORDER = 4 NUM_PREDICTIONS = 1 MODEL_PARAMS = { "model": "CLA", "version": 1, "predictAheadTime": None, "modelParams": { "inferenceType": "TemporalMultiStep", "sensorParams": { "verbosity" : 0, "encoders": { "element": { "fieldname": u"element", "name": u"element", "type": "SDRCategoryEncoder", "categoryList": range(SequenceGenerator.numSymbols(MAX_ORDER, NUM_PREDICTIONS)), "n": 2048, "w": 41 } }, "sensorAutoReset" : None, }, "spEnable": False, "spParams": { "spVerbosity" : 0, "globalInhibition": 1, "columnCount": 2048, "inputWidth": 0, "numActiveColumnsPerInhArea": 40, "seed": 1956, "columnDimensions": 0.5, "synPermConnected": 0.1, "synPermActiveInc": 0.1, "synPermInactiveDec": 0.01, "maxBoost": 0.0 }, "tpEnable" : True, "tpParams": { "verbosity": 0, "columnCount": 2048, "cellsPerColumn": 32, "inputWidth": 2048, "seed": 1960, "temporalImp": "tm_py", "newSynapseCount": 40, "maxSynapsesPerSegment": 128, "maxSegmentsPerCell": 128, "initialPerm": 0.21, "connectedPerm": 0.50, "permanenceInc": 0.1, "permanenceDec": 0.1, "globalDecay": 0.0, "maxAge": 0, "minThreshold": 15, "activationThreshold": 15, "outputType": "normal", "pamLength": 1, }, "clParams": { "implementation": "cpp", "regionName" : "CLAClassifierRegion", "clVerbosity" : 0, "alpha": 0.0001, "steps": "1", }, "trainSPNetOnlyIfRequested": False, }, } def generateSequences(): generator = SequenceGenerator(seed=42) sequences = [] for order in xrange(MIN_ORDER, MAX_ORDER+1): sequences += generator.generate(order, NUM_PREDICTIONS) for sequence in sequences: print sequence return sequences def movingAverage(a, n=3) : weights = numpy.repeat(1.0, n)/n return numpy.convolve(a, weights, 'valid') def plotAccuracy(correct, window=1000): if len(correct) > window: accuracy = movingAverage(correct, n=window) pyplot.plot(range(len(accuracy)), accuracy) else: pyplot.text(0, 0, "Waiting for data...", fontsize=24) def plotTraces(tm, timestamp=int(time.time()), window=500): """ Have to make the following change in NuPIC for this to work: --- a/nupic/research/TP_shim.py +++ b/nupic/research/TP_shim.py @@ -27,10 +27,13 @@ for use with OPF. 
import numpy from nupic.research.temporal_memory import TemporalMemory +from nupic.research.monitor_mixin.temporal_memory_monitor_mixin import ( + TemporalMemoryMonitorMixin) +class MonitoredTemporalMemory(TemporalMemoryMonitorMixin, TemporalMemory): pass -class TPShim(TemporalMemory): +class TPShim(MonitoredTemporalMemory): """ traces = tm.mmGetDefaultTraces() traces = [trace for trace in traces if type(trace) is CountsTrace] t = len(traces) for i in xrange(t): trace = traces[i] pyplot.subplot(t, 1, i+1) pyplot.title(trace.title) pyplot.xlim(max(len(trace.data)-window, 0), len(trace.data)) pyplot.plot(range(len(trace.data)), trace.data) pyplot.draw() # pyplot.savefig("tm-{0}.png".format(timestamp)) if __name__ == "__main__": model = ModelFactory.create(MODEL_PARAMS) model.enableInference({"predictedField": "element"}) shifter = InferenceShifter() sequences = generateSequences() correct = [] pyplot.ion() pyplot.show() from pylab import rcParams rcParams.update({'figure.autolayout': True}) rcParams.update({'figure.facecolor': 'white'}) rcParams.update({'ytick.labelsize': 8}) for i in xrange(100000000): sequence = random.choice(sequences) for j, element in enumerate(sequence): result = shifter.shift(model.run({"element": element})) # print element, result.inferences["multiStepPredictions"][1] if j == len(sequence) - 1: bestPredictions = sorted(result.inferences["multiStepPredictions"][1].items(), key=operator.itemgetter(1), reverse=True) topPredictions = [int(round(a)) for a, b in bestPredictions[:NUM_PREDICTIONS]] print "Evaluation:", element, topPredictions, element in topPredictions correct.append(element in topPredictions) if i % 100 == 0: rcParams.update({'figure.figsize': (12, 6)}) pyplot.figure(1) pyplot.clf() plotAccuracy(correct) pyplot.draw() # rcParams.update({'figure.figsize': (6, 12)}) # pyplot.figure(2) # pyplot.clf() # tm = model._getTPRegion().getSelf()._tfdr # plotTraces(tm) model.resetSequenceStates()
Python
0.000001
@@ -2559,17 +2559,17 @@ Count%22: -4 +2 0,%0A
1a8c06e655b622e7504a615c902ddb9b278f6470
add urdu mapping [skip ci]
custom/icds/translations/integrations/const.py
custom/icds/translations/integrations/const.py
from __future__ import absolute_import from __future__ import unicode_literals API_USER = "api" SOURCE_LANGUAGE_MAPPING = { # 'hq_code' : 'transifex_code' 'hin': 'hi', # hindi 'ori': 'or', # oriya 'tam': 'ta', # tamil 'pan': 'pa', # punjabi 'asm': 'as', # assamese 'ben': 'bn', # bengali 'guj': 'gu', # gujarati 'mal': 'ml', # malayalam 'mar': 'mr', # marathi 'snd': 'sd', # sindhi for test 'mri': 'mi', # maori 'khm': 'km', # khmer 'lug': 'lg', # ganda 'tel': 'te', # telugu }
Python
0.000014
@@ -543,10 +543,35 @@ telugu%0A + 'urd': 'ur', # urdu%0A %7D%0A
e98d141a281aaaa5af9e03afdd7a9543cf7e795a
Fix salt.states.selinux._refine_value()
salt/states/selinux.py
salt/states/selinux.py
''' Management of SELinux rules. ============================ If SELinux is available for the running system, the mode can be managed and booleans can be set. .. code-block:: yaml enforcing: selinux.mode samba_create_home_dirs: selinux.boolean: - value: True - persist: True ''' def _refine_mode(mode): ''' Return a mode value that is completely predictable ''' if any([ str(mode).startswith('e'), str(mode) == '1', str(mode).startswith('E'), str(mode) == 'on']): return 'Enforcing' if any([ str(mode).startswith('p'), str(mode) == '0', str(mode).startswith('P'), str(mode) == 'off']): return 'Permissive' return 'unknown' def _refine_value(value): ''' Return a value that is completely predictable ''' if any([ str(value) == '1', str(value) == 'on']): return 'on' if any([ str(value) == '0', str(value) == 'off']): return 'off' def mode(name): ''' Verifies the mode SELinux is running in, can be set to enforcing or permissive name The mode to run SELinux in, permissive or enforcing ''' ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} tmode = _refine_mode(name) if tmode == 'unknown': ret['comment'] = '{0} is not an accepted mode'.format(name) return ret mode = __salt__['selinux.getenforce']() if mode == tmode: ret['result'] = True ret['comment'] = 'SELinux is already in {0} mode'.format(tmode) return ret # The mode needs to change... if __opts__['test']: ret['comment'] = 'SELinux mode is set to be changed to {0}'.format( tmode) ret['result'] = None return ret mode = __salt__['selinux.setenforce'](tmode) if mode == tmode: ret['result'] = True ret['comment'] = 'SELinux has been set to {0} mode'.format(tmode) return ret ret['comment'] = 'Failed to set SELinux to {0} mode'.format(tmode) return ret def boolean(name, value, persist=False): ''' Set up an SELinux boolean name The name of the boolean to set value The value to set on the boolean persist Defaults to False, set persist to true to make the boolean apply on a reboot ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} bools = __salt__['selinux.list_sebool']() if name not in bools: ret['comment'] = 'Boolean {0} is not available'.format(name) ret['result'] = False return ret value = _refine_value(value) state = bools[name]['State'] == value default = bools[name]['Default'] == value if persist: if state and default: ret['comment'] = 'Boolean is in the correct state' return ret else: if state: ret['comment'] = 'Boolean is in the correct state' return ret if __opts__['test']: ret['result'] = None ret['comment'] = 'Boolean {0} is set to be changed to {1}'.format( name, value) return ret if __salt__['selinux.setsebool'](name, value, persist): ret['comment'] = 'Boolean {0} has been set to {1}'.format(name, value) return ret ret['comment'] = 'Failed to set the boolean {0} to {1}'.format(name, value) return ret
Python
0
@@ -383,35 +383,24 @@ lue that is -completely predictable%0A @@ -415,34 +415,49 @@ -if any(%5B%0A str( +mode = str(mode).lower()%0A if any(%5B mode -) .sta @@ -474,33 +474,32 @@ '),%0A -str( + mode -) == '1',%0A @@ -507,52 +507,16 @@ -str(mode).startswith('E'),%0A str( + mode -) == @@ -562,34 +562,20 @@ if any(%5B -%0A str( mode -) .startsw @@ -588,33 +588,32 @@ '),%0A + -str( + mode -) == '0',%0A @@ -621,52 +621,16 @@ -str(mode).startswith('P'),%0A str( + mode -) == @@ -630,32 +630,36 @@ ode == 'off'%5D):%0A + return ' @@ -744,218 +744,223 @@ n a -value that is completely predictable%0A '''%0A if any(%5B%0A str(value) == '1',%0A str(value) == 'on'%5D):%0A return 'on'%0A if any(%5B%0A str(value) == '0',%0A str(value) == +yes/no value, or None if the input is invalid%0A '''%0A value = str(value).lower()%0A if value in ('1', 'on', 'yes', 'true'):%0A return 'on'%0A if value in ('0', 'off', 'no', 'false'):%0A return 'off' -%5D): %0A - @@ -966,21 +966,20 @@ return -'off' +None %0A%0A%0Adef m @@ -2673,24 +2673,25 @@ urn ret%0A +r value = _ref @@ -2703,24 +2703,208 @@ alue(value)%0A + if rvalue is None:%0A ret%5B'comment'%5D = '%7B0%7D is not a valid value for the ' %5C%0A 'boolean'.format(value)%0A ret%5B'result'%5D = False%0A return ret%0A state = @@ -2923,24 +2923,25 @@ 'State'%5D == +r value%0A de @@ -2974,16 +2974,17 @@ lt'%5D == +r value%0A @@ -3374,24 +3374,25 @@ name, +r value)%0A @@ -3445,24 +3445,25 @@ ool'%5D(name, +r value, persi @@ -3531,32 +3531,33 @@ %7D'.format(name, +r value)%0A r @@ -3635,24 +3635,25 @@ ormat(name, +r value)%0A r
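Applied, the diff lowercases the input in both helpers, lets _refine_value accept yes/true and no/false spellings, and makes it return None for anything unrecognised — boolean() then fails fast with an error message instead of silently writing a bad value. The patched helper, decoded from the diff:

def _refine_value(value):
    '''
    Return a yes/no value, or None if the input is invalid
    '''
    value = str(value).lower()
    if value in ('1', 'on', 'yes', 'true'):
        return 'on'
    if value in ('0', 'off', 'no', 'false'):
        return 'off'
    return None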
d9304cd7c19e29fc24ba474a5c7983ce3bb88a2b
Fix benchmark name
lib/node_modules/@stdlib/types/ndarray/ind2sub/benchmark/python/numpy/benchmark.py
lib/node_modules/@stdlib/types/ndarray/ind2sub/benchmark/python/numpy/benchmark.py
#!/usr/bin/env python """Benchmark numpy.unravel_index.""" from __future__ import print_function import timeit NAME = "unravel_index" REPEATS = 3 ITERATIONS = 1000000 def print_version(): """Print the TAP version.""" print("TAP version 13") def print_summary(total, passing): """Print the benchmark summary. # Arguments * `total`: total number of tests * `passing`: number of passing tests """ print("#") print("1.." + str(total)) # TAP plan print("# total " + str(total)) print("# pass " + str(passing)) print("#") print("# ok") def print_results(elapsed): """Print benchmark results. # Arguments * `elapsed`: elapsed time (in seconds) # Examples ``` python python> print_results(0.131009101868) ``` """ rate = ITERATIONS / elapsed print(" ---") print(" iterations: " + str(ITERATIONS)) print(" elapsed: " + str(elapsed)) print(" rate: " + str(rate)) print(" ...") def benchmark(): """Run the benchmark and print benchmark results.""" setup = "import numpy as np; from random import random;" stmt = "y = np.unravel_index(int(random()*1000.0), (10,10,10))" t = timeit.Timer(stmt, setup=setup) print_version() for i in xrange(REPEATS): print("# python::numpy::" + NAME) elapsed = t.timeit(number=ITERATIONS) print_results(elapsed) print("ok " + str(i+1) + " benchmark finished") print_summary(REPEATS, REPEATS) def main(): """Run the benchmark.""" benchmark() if __name__ == "__main__": main()
Python
0.001182
@@ -114,29 +114,23 @@ NAME = %22 -unravel_index +ind2sub %22%0AREPEAT
deb749252a83f59c0bfee3b14abafc5582fb3986
fix 500 - closes #20
letsmeet/events/views.py
letsmeet/events/views.py
from rules.contrib.views import PermissionRequiredMixin from django.shortcuts import redirect from django.contrib.auth.mixins import LoginRequiredMixin from django.views.generic import ( CreateView, DetailView, UpdateView, ) from .models import Event, EventRSVP, EventComment from .forms import EventUpdateForm, EventCommentCreateForm class CommunityEventMixin: def get_object(self, queryset=None): obj = Event.objects.get( slug=self.kwargs.get('slug'), community__slug=self.kwargs.get('community_slug')) return obj class EventUpdateView(LoginRequiredMixin, PermissionRequiredMixin, CommunityEventMixin, UpdateView): model = Event template_name = 'events/event_update.html' permission_required = 'event.can_edit' form_class = EventUpdateForm class EventDetailView(CommunityEventMixin, DetailView): model = Event def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['comment_form'] = EventCommentCreateForm() return context class EventRSVPView(LoginRequiredMixin, PermissionRequiredMixin, CommunityEventMixin, DetailView): model = Event template_name = 'events/event_rsvp.html' permission_required = 'event.can_rsvp' allowed_methods = ['post'] def post(self, request, *args, **kwargs): event = self.get_object() answer = self.kwargs.get('answer') if answer == 'reset': try: EventRSVP.objects.get(event=event, user=request.user).delete() except EventRSVP.DoesNotExist: pass else: EventRSVP.objects.get_or_create( event=event, user=request.user, defaults={ 'coming': True if answer == 'yes' else False } ) return redirect(event) class EventCommentCreateView(LoginRequiredMixin, PermissionRequiredMixin, CommunityEventMixin, CreateView): model = EventComment form_class = EventCommentCreateForm template_name = 'events/eventcomment_create.html' permission_required = 'event.can_create_comment' def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['event'] = self.get_object() return context def form_valid(self, form): comment = form.save(commit=False) comment.event = self.get_object() comment.user = self.request.user comment.save() return redirect(comment.event)
Python
0.000001
@@ -87,16 +87,35 @@ redirect +, get_object_or_404 %0Afrom dj @@ -443,32 +443,52 @@ -obj = Event.objects.get( +return get_object_or_404(%0A Event, %0A @@ -525,16 +525,28 @@ 'slug'), +%0A communi @@ -593,27 +593,8 @@ '))%0A - return obj%0A %0A%0Acl
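The 500 came from Event.objects.get raising DoesNotExist for unknown slugs; the patch swaps it for get_object_or_404 so the view returns a 404 instead. The patched mixin, decoded from the diff (Event is the model already imported at the top of the original module):

from django.shortcuts import redirect, get_object_or_404

class CommunityEventMixin:
    def get_object(self, queryset=None):
        # Unknown slug -> Http404 instead of an uncaught DoesNotExist.
        return get_object_or_404(
            Event,
            slug=self.kwargs.get('slug'),
            community__slug=self.kwargs.get('community_slug'))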
9b46fc08cafeef07ffba6569a1a30b730490dd83
Improve pole.py day change calculation
bot/action/extra/pole.py
bot/action/extra/pole.py
from bot.action.core.action import Action from bot.api.domain import Message, Chat SECONDS_IN_A_DAY = 86400 OFFSET_FROM_UTC_IN_SECONDS = 2 * 3600 class PoleAction(Action): def process(self, event): if event.global_gap_detected: # reset everything event.state.last_message_timestamp = None event.state.current_day_message_count = None event.state.current_day_first_messages = None current_message_timestamp = event.message.date previous_message_timestamp = event.state.last_message_timestamp event.state.last_message_timestamp = str(current_message_timestamp) if previous_message_timestamp is not None: current_message_seconds = self.get_seconds_within_day(current_message_timestamp) previous_message_seconds = self.get_seconds_within_day(int(previous_message_timestamp)) if current_message_seconds < previous_message_seconds: # day change: pole event.state.current_day_message_count = str(1) event.state.current_day_first_messages = self.get_formatted_message_to_store(event.message) else: current_day_message_count = event.state.current_day_message_count if current_day_message_count is not None: current_day_message_count = int(current_day_message_count) if current_day_message_count < 3: current_day_message_count += 1 event.state.current_day_first_messages += "\n" + self.get_formatted_message_to_store(event.message) event.state.current_day_message_count = str(current_day_message_count) if current_day_message_count == 3: pole_message, subpole_message, subsubpole_message = event.state.current_day_first_messages.splitlines() self.send_message(pole_message, "pole") self.send_message(subpole_message, "subpole") self.send_message(subsubpole_message, "subsubpole") event.state.current_day_message_count = None def send_message(self, reply_to, text): message_id, chat_id = reply_to.split() self.api.send_message(Message.create(Chat(id=chat_id), text, reply_to_message_id=message_id)) @staticmethod def get_seconds_within_day(timestamp): return (timestamp + OFFSET_FROM_UTC_IN_SECONDS) % SECONDS_IN_A_DAY @staticmethod def get_formatted_message_to_store(message): return str(message.message_id) + " " + str(message.chat.id)
Python
0.000001
@@ -700,39 +700,35 @@ current_message_ -seconds +day = self.get_seco @@ -723,34 +723,26 @@ elf.get_ -seconds_within_day +day_number (current @@ -790,23 +790,19 @@ message_ -seconds +day = self. @@ -805,34 +805,26 @@ elf.get_ -seconds_within_day +day_number (int(pre @@ -885,17 +885,14 @@ age_ -seconds %3C +day != pre @@ -905,23 +905,19 @@ message_ -seconds +day : # day @@ -2381,26 +2381,18 @@ get_ -seconds_within_day +day_number (tim @@ -2460,9 +2460,10 @@ DS) -%25 +// SEC
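The rework swaps seconds-within-day for an integer day index: floor division (//) instead of modulo (%) in the helper, and != instead of < in the comparison. Comparing day numbers detects a date change even when a full day or more passes between messages, which the old less-than test on wall-clock seconds could miss. The patched helper:

SECONDS_IN_A_DAY = 86400
OFFSET_FROM_UTC_IN_SECONDS = 2 * 3600

def get_day_number(timestamp):
    # Floor division yields a monotonically increasing day index,
    # shifted so days roll over at local (UTC+2) midnight.
    return (timestamp + OFFSET_FROM_UTC_IN_SECONDS) // SECONDS_IN_A_DAY

# Two messages exactly 24h apart: same seconds-within-day, different day.
assert get_day_number(1500000000) != get_day_number(1500000000 + 86400)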
745ab5a354299207cbe069c5c5d2632ef8c54c07
version bump
seed_stage_based_messaging/__init__.py
seed_stage_based_messaging/__init__.py
__version__ = '0.9.12' VERSION = __version__
Python
0.000001
@@ -17,9 +17,9 @@ .9.1 -2 +3 '%0AVE
420118a9e238c744040e72937bd7cacae728f74b
update ilsgateway location type test
custom/ilsgateway/tests/test_locations_sync.py
custom/ilsgateway/tests/test_locations_sync.py
from datetime import datetime import json import os from django.test import TestCase from corehq.apps.commtrack.tests.util import bootstrap_domain as initial_bootstrap from corehq.apps.locations.models import Location, SQLLocation from custom.ilsgateway.api import Location as Loc, ILSGatewayAPI from custom.ilsgateway.tests.mock_endpoint import MockEndpoint from custom.logistics.commtrack import synchronization from custom.logistics.models import MigrationCheckpoint TEST_DOMAIN = 'ilsgateway-commtrack-locations-test' class LocationSyncTest(TestCase): def setUp(self): self.endpoint = MockEndpoint('http://test-api.com/', 'dummy', 'dummy') self.api_object = ILSGatewayAPI(TEST_DOMAIN, self.endpoint) self.datapath = os.path.join(os.path.dirname(__file__), 'data') initial_bootstrap(TEST_DOMAIN) for location in Location.by_domain(TEST_DOMAIN): location.delete() def test_create_location(self): with open(os.path.join(self.datapath, 'sample_locations.json')) as f: location = Loc(**json.loads(f.read())[1]) ilsgateway_location = self.api_object.location_sync(location) self.assertEqual(ilsgateway_location.name, location.name) self.assertEqual(ilsgateway_location.location_type, location.type) self.assertEqual(ilsgateway_location.longitude, float(location.longitude)) self.assertEqual(ilsgateway_location.latitude, float(location.latitude)) self.assertEqual(ilsgateway_location.parent, location.parent_id) def test_locations_migration(self): checkpoint = MigrationCheckpoint( domain=TEST_DOMAIN, start_date=datetime.now(), date=datetime.now(), api='product', limit=100, offset=0 ) synchronization('location_facility', self.endpoint.get_locations, self.api_object.location_sync, checkpoint, None, 100, 0, filters=dict(type='facility')) self.assertEqual('location_facility', checkpoint.api) self.assertEqual(100, checkpoint.limit) self.assertEqual(0, checkpoint.offset) self.assertEqual(4, len(list(Location.by_domain(TEST_DOMAIN)))) self.assertEqual(4, SQLLocation.objects.filter(domain=TEST_DOMAIN).count()) sql_location = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='DM520053') self.assertEqual('FACILITY', sql_location.location_type) self.assertIsNotNone(sql_location.supply_point_id) sql_location2 = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='region-dodoma') self.assertEqual('REGION', sql_location2.location_type) self.assertIsNone(sql_location2.supply_point_id)
Python
0
@@ -2472,32 +2472,37 @@ on.location_type +.name )%0A self.a @@ -2700,24 +2700,29 @@ ocation_type +.name )%0A se
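The only change is appending .name to both location_type assertions — presumably because SQLLocation.location_type became a related LocationType object rather than a plain string. Decoded, the patched lines inside test_locations_migration read:

# location_type is now an object, so the tests compare its .name:
self.assertEqual('FACILITY', sql_location.location_type.name)
self.assertEqual('REGION', sql_location2.location_type.name)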
7f49672497104455d0276ac863f4b2791ee4c7bd
Replace Video constructor with generic class of subclasses.
mr/video/opencv.py
mr/video/opencv.py
import os import cv2 import numpy as np class Frame(object): """Iterable object that returns frames of video as numpy arrays of integers 0-255. Parameters ---------- filename : string gray : Convert color image to grayscale. True by default. invert : Invert black and white. True by default. Examples -------- >>> video = Video('filename') >>> imshow(video.next()) # Show the first frame. >>> imshow(video.next()[0:10][0:10]) # Show one corner of the second frame. >>> video.rewind() >>> imshow(video.next()) # First frame again. >>> for frame in video: ... # Do something with every frame. >>> for frame in video[10:20]: ... # Do something with frames 10-20. >>> frame_count = video.count # Number of frames in video """ def __init__(self, filename, gray=True, invert=True): self.filename = filename self.gray = gray self.invert = invert self.capture = self._open(self.filename) self.shape = (self.capture.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH), self.capture.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)) self.cursor = 0 self.count = self._count() self.endpoint = None def __repr__(self): return """<Video Frames> Source File: %s Frame Dimensions: %d x %d Cursor at Frame %d of %d""" % (self.filename, self.shape[0], self.shape[1], self.cursor, self.count) def __iter__(self): return self def _process(self, frame): if self.gray: frame = cv2.cvtColor(frame, cv2.cv.CV_RGB2GRAY) if self.invert: frame *= -1 frame += 255 return frame @property def endpoint(self): return self._endpoint @endpoint.setter def endpoint(self, val): self._endpoint = val def _count(self): "Return total frame count. Result is not always exact." return int(self.capture.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)) def seek_forward(self, val): for _ in range(val): self.next() def rewind(self): """Reopen the video file to start at the beginning. ('Seeking' capabilities in the underlying OpenCV library are not reliable.)""" self.capture = self._open(self.filename) self.cursor = 0 def next(self): if self.endpoint is not None and self.cursor > self.endpoint: raise StopIteration return_code, frame = self.capture.read() if not return_code: # A failsafe: the frame count is not always accurate. raise StopIteration frame = self._process(frame) self.cursor += 1 return frame def __getitem__(self, val): if isinstance(val, slice): start, stop, step = val.indices(self.count) if step != 1: raise NotImplementedError, \ "Step must be 1." else: start = val stop = None video_copy = Video(self.filename, self.gray, self.invert) video_copy.seek_forward(start) video_copy.endpoint = stop return video_copy def open_video(filename): """Thin convenience function for return an opencv2 Capture object.""" # ffmpeg -i unreadable.avi -sameq -r 30 readable.avi if not os.path.isfile(filename): raise ValueError, "%s is not a file." % filename capture = cv2.VideoCapture(filename) return capture class Video(Frame): def _open(self, filename): return open_video(filename)
Python
0
@@ -3071,21 +3071,30 @@ _copy = -Video +self.__class__ (self.fi
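Replacing the hard-coded Video(...) call with self.__class__(...) in __getitem__ makes slicing polymorphic: a slice of any Frame subclass comes back as that subclass. A runnable toy showing the pattern (copy stands in for the slicing logic; the filename is made up):

class Frame(object):
    def __init__(self, filename, gray=True, invert=True):
        self.filename, self.gray, self.invert = filename, gray, invert

    def copy(self):
        # self.__class__ keeps the runtime type; a hard-coded Frame(...)
        # (or Video(...)) would downcast every subclass to that one type.
        return self.__class__(self.filename, self.gray, self.invert)

class Video(Frame):
    pass

assert type(Video('clip.avi').copy()) is Video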
72da5e5a0137215c7a7ea72496a6fdfe5d830f68
set extend_existing on redefined usershub models
backend/geonature/core/users/models.py
backend/geonature/core/users/models.py
from sqlalchemy import ForeignKey from pypnusershub.db.models import User from utils_flask_sqla.serializers import serializable from sqlalchemy.sql import select, func from sqlalchemy.dialects.postgresql import UUID from geonature.utils.env import DB @serializable class VUserslistForallMenu(DB.Model): __tablename__ = "v_userslist_forall_menu" __table_args__ = {"schema": "utilisateurs"} id_role = DB.Column(DB.Integer, primary_key=True) nom_role = DB.Column(DB.Unicode) prenom_role = DB.Column(DB.Unicode) nom_complet = DB.Column(DB.Unicode) id_menu = DB.Column(DB.Integer, primary_key=True) @serializable class BibOrganismes(DB.Model): __tablename__ = "bib_organismes" __table_args__ = {"schema": "utilisateurs"} id_organisme = DB.Column(DB.Integer, primary_key=True) uuid_organisme = DB.Column( UUID(as_uuid=True), default=select([func.uuid_generate_v4()]) ) nom_organisme = DB.Column(DB.Unicode) cp_organisme = DB.Column(DB.Unicode) ville_organisme = DB.Column(DB.Unicode) tel_organisme = DB.Column(DB.Unicode) fax_organisme = DB.Column(DB.Unicode) email_organisme = DB.Column(DB.Unicode) @serializable class CorRole(DB.Model): __tablename__ = "cor_roles" __table_args__ = {"schema": "utilisateurs"} id_role_groupe = DB.Column( DB.Integer, ForeignKey("utilisateurs.t_roles.id_role"), primary_key=True ) id_role_utilisateur = DB.Column(DB.Integer, primary_key=True) role = DB.relationship( User, primaryjoin=(User.id_role == id_role_groupe), foreign_keys=[id_role_groupe], ) def __init__(self, id_group, id_role): self.id_role_groupe = id_group self.id_role_utilisateur = id_role @serializable class TApplications(DB.Model): __tablename__ = "t_applications" __table_args__ = {"schema": "utilisateurs", "extend_existing": True} id_application = DB.Column(DB.Integer, primary_key=True) nom_application = DB.Column(DB.Unicode) desc_application = DB.Column(DB.Unicode) id_parent = DB.Column(DB.Integer) class UserRigth: def __init__( self, id_role=None, id_organisme=None, code_action=None, value_filter=None, module_code=None, nom_role=None, prenom_role=None, ): self.id_role = id_role self.id_organisme = id_organisme self.value_filter = value_filter self.code_action = code_action self.module_code = module_code self.nom_role = nom_role self.prenom_role = prenom_role @serializable class TListes(DB.Model): __tablename__ = "t_listes" __table_args__ = {"schema": "utilisateurs", "extend_existing": True} id_liste = DB.Column(DB.Integer, primary_key=True) code_liste = DB.Column(DB.Unicode) nom_liste = DB.Column(DB.Unicode) desc_liste = DB.Column(DB.Integer)
Python
0
@@ -740,32 +740,57 @@ : %22utilisateurs%22 +, %22extend_existing%22: True %7D%0A id_organis @@ -1308,32 +1308,57 @@ : %22utilisateurs%22 +, %22extend_existing%22: True %7D%0A id_role_gr
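For context, `extend_existing=True` tells SQLAlchemy to merge a new definition into a `Table` that is already registered on the `MetaData`, which is what lets these models re-declare tables UsersHub has already defined. A minimal sketch in plain SQLAlchemy (table and column names are illustrative):

from sqlalchemy import Column, Integer, MetaData, Table, Unicode

metadata = MetaData()

# First definition, e.g. coming from the upstream package.
Table('t_roles', metadata,
      Column('id_role', Integer, primary_key=True),
      schema='utilisateurs')

# Re-declaring the same table would raise InvalidRequestError;
# with extend_existing=True the extra column is merged in instead.
Table('t_roles', metadata,
      Column('nom_role', Unicode),
      schema='utilisateurs', extend_existing=True)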
c4ea2d360ebf0fc9b9d9494ed43e540eaf2282d8
add compatibility import
dask/array/into.py
dask/array/into.py
from __future__ import absolute_import, division, print_function

import numpy as np
from toolz import merge, accumulate
from into import discover, convert, append, into
from datashape.dispatch import dispatch
from datashape import DataShape
from operator import add
import itertools

from .core import rec_concatenate, Array, getem, get, names, from_array
from ..core import flatten


@discover.register(Array)
def discover_dask_array(a, **kwargs):
    block = a._get_block(*([0] * a.ndim))
    return DataShape(*(a.shape + (discover(block).measure,)))


arrays = [np.ndarray]

try:
    import h5py
    arrays.append(h5py.Dataset)

    @dispatch(h5py.Dataset, (int, long))
    def resize(x, size):
        s = list(x.shape)
        s[0] = size
        return resize(x, tuple(s))

    @dispatch(h5py.Dataset, tuple)
    def resize(x, shape):
        return x.resize(shape)
except ImportError:
    pass

try:
    import bcolz
    arrays.append(bcolz.carray)

    @dispatch(bcolz.carray, (int, long))
    def resize(x, size):
        return x.resize(size)
except ImportError:
    pass


@convert.register(Array, tuple(arrays), cost=0.01)
def array_to_dask(x, name=None, blockshape=None, **kwargs):
    return from_array(x, blockshape=blockshape, name=name, **kwargs)


@convert.register(np.ndarray, Array, cost=0.5)
def dask_to_numpy(x, **kwargs):
    return rec_concatenate(get(x.dask, x._keys(), **kwargs))


@convert.register(float, Array, cost=0.5)
def dask_to_float(x, **kwargs):
    return x.compute()


@append.register(tuple(arrays), Array)
def store_Array_in_ooc_data(out, arr, inplace=False, **kwargs):
    if not inplace:
        # Resize output dataset to accept new data
        assert out.shape[1:] == arr.shape[1:]
        resize(out, out.shape[0] + arr.shape[0])  # elongate
    return arr.store(out)
Python
0
@@ -376,16 +376,49 @@ flatten%0A +from ..compatibility import long%0A %0A%0A@disco
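`long` here is Python 2's arbitrary-precision integer, which was removed in Python 3; a compatibility module usually aliases it roughly like this (a sketch, not dask's actual implementation):

import sys

if sys.version_info[0] >= 3:
    long = int   # Python 3 merged int and long
else:
    long = long  # keep the Python 2 builtin available for import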
f346daeb6d75c3235d52a98f54b91e03999e228f
Configure CORS to accept requests from the web application
backend/restapp/restapp/settings.py
backend/restapp/restapp/settings.py
""" Django settings for restapp project. Generated by 'django-admin startproject' using Django 1.9.4. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'kzo)k7gbjhydqxb+*sbeu)zyr7f8vy7slt*ru!3rl@8zf@ga(d' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'books.apps.BooksConfig', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'restapp.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'restapp.wsgi.application' # Configuration for rest-framework REST_FRAMEWORK = { # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' ] } # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
Python
0.000001
@@ -1055,24 +1055,43 @@ aticfiles',%0A + 'corsheaders',%0A 'rest_fr @@ -1261,32 +1261,77 @@ ionMiddleware',%0A + 'corsheaders.middleware.CorsMiddleware',%0A 'django.midd @@ -3573,8 +3573,67 @@ tatic/'%0A +%0ACORS_ORIGIN_WHITELIST = (%0A 'localhost:8080',%0A )%0A
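With django-cors-headers, three pieces are needed: the app in INSTALLED_APPS, CorsMiddleware in the stack (the project docs recommend placing it as high as possible, before CommonMiddleware), and a whitelist of origins. A condensed sketch of the intended configuration:

INSTALLED_APPS += ['corsheaders']

MIDDLEWARE_CLASSES = [
    'corsheaders.middleware.CorsMiddleware',  # as early as possible
    'django.middleware.common.CommonMiddleware',
    # ... remaining middleware ...
]

# Only whitelisted origins receive Access-Control-Allow-Origin headers.
CORS_ORIGIN_WHITELIST = (
    'localhost:8080',
)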
83a1ccd0cd657b08fd6bd43b212d1b526f466474
Use unicodedata instead of PostgreSQL's unaccent because this extension is not in PostgreSQL 8.4
server/localfinance/views.py
server/localfinance/views.py
# -*- coding: utf-8 -*-
import os
import json

from sqlalchemy import func
from pyramid.view import view_config
from pyramid.response import FileResponse

from cornice import Service
from cornice.resource import resource, view

from .models import AdminZoneFinance, DBSession, AdminZone, Stats as StatsModel, ADMIN_LEVEL_CITY
from .maps import timemap_registry, MAPS_CONFIG


city_search = Service(name='city_search', path='/city_search', description="city search")


@city_search.get()
def get_city(request):
    term = request.params['term']
    results = DBSession.query(AdminZone.id, AdminZone.name, AdminZone.code_insee,
                              func.ST_X(func.ST_Centroid(AdminZone.geometry)),
                              func.ST_Y(func.ST_Centroid(AdminZone.geometry)))\
        .filter(func.lower(AdminZone.name).like(func.lower(func.unaccent(term+"%"))))\
        .filter(AdminZone.admin_level==ADMIN_LEVEL_CITY)\
        .order_by(func.similarity(AdminZone.name, term).desc()).all()

    def format(result):
        return {'id': result[0], 'name': result[1], 'code_insee': result[2], 'lng': result[3], 'lat': result[4]}
    return {'results': [format(res) for res in results]}


@resource(collection_path='/timemaps', path='/timemap/{id}')
class TimeMap(object):

    def __init__(self, request):
        self.request = request

    def get(self):
        id = self.request.matchdict['id']
        return {'results': {'var_name': id, 'maps': [m.info for m in timemap_registry[id]]}}

    def collection_get(self):
        return {'results': [{'var_name': key, 'maps': [m.info for m in timemap_registry[key]]} for key in MAPS_CONFIG.keys()]}


@resource(collection_path='/finance', path='/finance/{id}')
class AZFinance(object):

    def __init__(self, request):
        self.request = request

    def get(self):
        id = self.request.matchdict['id']
        res = DBSession.query(AdminZone.name, AdminZone.code_insee, AdminZone.code_department,
                              AdminZoneFinance.year, AdminZoneFinance.data).join(
            AdminZoneFinance, AdminZone.id==AdminZoneFinance.adminzone_id).filter(
            AdminZone.id==id).order_by('year').all()
        return {'results': res}


@resource(collection_path='/stats', path='/stat/{id}')
class Stats(object):

    def __init__(self, request):
        self.request = request

    def get(self):
        id = self.request.matchdict['id']
        stat = DBSession.query(StatsModel).filter(StatsModel.name==id).first()
        return {'results': {'mean_by_year': json.loads(stat.data['mean_by_year']), 'var_name': id}}

    def collection_get(self):
        stats = DBSession.query(StatsModel).all()
        return {'results': [{'mean_by_year': json.loads(stat.data['mean_by_year']), 'var_name': stat.name} for stat in stats]}


# XXX: view set for development purpose only
from pyramid.response import FileResponse

def index(request):
    html_file = os.path.join(request.registry.settings['app_dir'], 'index.html')
    return FileResponse(html_file)
Python
0.000002
@@ -40,16 +40,33 @@ rt json%0A +import unidecode%0A from sql @@ -550,16 +550,112 @@ 'term'%5D%0A + term_ascii = unicodedata.normalize('NFKD', unicode(term)).encode('ascii', 'ignore').lower()%0A resu @@ -937,45 +937,24 @@ ike( -func.lower(func.unaccent(term +term_ascii +%22%25%22)) -)) %5C%0A
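The replacement folds accents by NFKD-decomposing the string and dropping the combining marks during an ASCII encode (note the diff imports `unidecode` but the code it adds actually calls `unicodedata.normalize`). The core transformation can be exercised on its own, as in this Python 2 sketch:

import unicodedata

def ascii_fold(term):
    # u'e' + COMBINING ACUTE ACCENT is what NFKD produces for u'é';
    # encoding to ASCII with errors='ignore' drops the accent mark.
    return unicodedata.normalize('NFKD', unicode(term)).encode('ascii', 'ignore').lower()

assert ascii_fold(u'Orléans') == 'orleans'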
c1d73206436389f27187f3b52ff0daf6e106918c
Fix serialization of Marathon Constraints
marathon/models/constraint.py
marathon/models/constraint.py
from ..exceptions import InvalidOperatorError
from .base import MarathonObject


class MarathonConstraint(MarathonObject):
    """Marathon placement constraint.

    See https://mesosphere.github.io/marathon/docs/constraints.html

    :param str field: constraint operator target
    :param str operator: must be one of [UNIQUE, CLUSTER, GROUP_BY]
    :param value: [optional] if `operator` is CLUSTER, constrain tasks to servers where `field` == `value`.
        If `operator` is GROUP_BY, place at most `value` tasks per group
    :type value: str, int, or None
    """

    OPERATORS = ['UNIQUE', 'CLUSTER', 'GROUP_BY']
    """Valid operators"""

    def __init__(self, field, operator, value=None):
        if not operator in self.OPERATORS:
            raise InvalidOperatorError(operator)
        self.field = field
        self.operator = operator
        self.value = value

    def __repr__(self):
        if self.value:
            template = "MarathonConstraint::{field}:{operator}:{value}"
        else:
            template = "MarathonConstraint::{field}:{operator}"
        return template.format(**self.__dict__)

    @classmethod
    def json_decode(cls, obj):
        """Construct a MarathonConstraint from a parsed response.

        :param dict attributes: object attributes from parsed response

        :rtype: :class:`MarathonConstraint`
        """
        if len(obj) == 2:
            (field, operator) = obj
            return cls(field, operator)
        if len(obj) > 2:
            (field, operator, value) = obj
            return cls(field, operator, value)

    def json_encode(self):
        """Construct a JSON-friendly representation of the object.

        :rtype: dict
        """
        if self.value:
            return [self.field, self.operator, self.value]
        else:
            return [self.field, self.operator]
Python
0.002336
@@ -1118,16 +1118,286 @@ ict__)%0A%0A + def json_repr(self):%0A %22%22%22Construct a JSON-friendly representation of the object.%0A%0A :rtype: list%0A %22%22%22%0A if self.value:%0A return %5Bself.field, self.operator, self.value%5D%0A else:%0A return %5Bself.field, self.operator%5D%0A%0A @cla @@ -1417,19 +1417,17 @@ def -json_decode +from_json (cls @@ -1850,276 +1850,4 @@ ue)%0A -%0A def json_encode(self):%0A %22%22%22Construct a JSON-friendly representation of the object.%0A%0A :rtype: dict%0A %22%22%22%0A if self.value:%0A return %5Bself.field, self.operator, self.value%5D%0A else:%0A return %5Bself.field, self.operator%5D%0A
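With the hooks renamed to `from_json`/`json_repr` as in the diff, a constraint round-trips through the bare list form Marathon uses on the wire; roughly:

c = MarathonConstraint.from_json(['hostname', 'GROUP_BY', '2'])
assert c.json_repr() == ['hostname', 'GROUP_BY', '2']

c = MarathonConstraint.from_json(['rack_id', 'UNIQUE'])
assert c.json_repr() == ['rack_id', 'UNIQUE']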
381dc5a1f92916d8ce66c7eef95e2237ff20b044
fix tests
astropy/coordinates/tests/test_sites.py
astropy/coordinates/tests/test_sites.py
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from ...tests.helper import pytest, assert_quantity_allclose
from ... import units as u
from .. import Latitude, Longitude, EarthLocation, get_site, add_site, remove_site


def test_get_site():
    # Compare to the IRAF observatory list available at:
    # http://tdc-www.harvard.edu/iraf/rvsao/bcvcorr/obsdb.html
    keck = get_site('keck')
    lon, lat, el = keck.to_geodetic()
    assert_quantity_allclose(lon, -1*Longitude('155:28.7', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(lat, Latitude('19:49.7', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(el, 4160*u.m, atol=1*u.m)

    keck = get_site('ctio')
    lon, lat, el = keck.to_geodetic()
    assert_quantity_allclose(lon, -1*Longitude('70.815', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(lat, Latitude('-30.16527778', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(el, 2215*u.m, atol=1*u.m)


def test_add_remove_site():
    from ..sites import _site_db  #needed for comparison below
    initlen = len(_site_db)

    # Test observatory can be added and retrieved
    new_site_name = 'University of Washington'
    new_site_location = EarthLocation(-122.3080*u.deg, 47.6550*u.deg, 0*u.m)
    add_site(new_site_name, new_site_location)
    retrieved_location = get_site(new_site_name)
    assert retrieved_location == new_site_location

    assert len(_site_db) == (initlen + 1)

    #now see if it can be removed
    remove_site(new_site_name)
    assert len(_site_db) == initlen

    #now try add/remove with aliases
    new_site_names = [new_site_name, 'UW']
    add_site(new_site_names, new_site_location)
    assert len(_site_db) == (initlen + 2)

    remove_site(new_site_name, remove_aliases=True)
    assert len(_site_db) == initlen

    add_site(new_site_names, new_site_location)
    assert len(_site_db) == (initlen + 2)

    remove_site(new_site_names[1], remove_aliases=True)
    assert len(_site_db) == initlen


def test_bad_site():
    with pytest.raises(KeyError):
        get_site('nonexistent site')
Python
0.000001
@@ -1732,35 +1732,43 @@ now -try add/remove with aliases +check that alias removals works too %0A @@ -1926,37 +1926,16 @@ ite_name -, remove_aliases=True )%0A as @@ -2047,32 +2047,32 @@ = (initlen + 2)%0A + remove_site( @@ -2092,29 +2092,8 @@ s%5B1%5D -, remove_aliases=True )%0A
2ce05b9525d3b66676a269496de12b0cb54137c1
Set title directly on the graph constructor
mrdp/processing.py
mrdp/processing.py
""" Climate Data Online aggregation and graphing TODO Add a box plot of the temperature data to be more interesting? """ from csv import reader as csv_reader from datetime import date from pygal import Line __all__ = [ 'render_graphs', 'aggregate_monthly_data', 'monthly_total_precip_line', 'monthly_avg_min_max_temp_line', ] # 12-member list of the names of the months, i.e. ["January", "February", ...] MONTH_NAMES = [date(2016, month, 1).strftime('%B') for month in range(1, 13)] def render_graphs(csv_data, append_titles=""): """ Convenience function. Gets the aggregated `monthlies` data from `aggregate_monthly_data(csv_data)` and returns a dict of graph titles mapped to rendered SVGs from `monthly_total_precip_line()` and `monthly_avg_min_max_temp_line()` using the `monthlies` data. """ monthlies = aggregate_monthly_data(csv_data) return { graph.config.title: graph.render() for graph in [ monthly_total_precip_line(monthlies, append_titles), monthly_avg_min_max_temp_line(monthlies, append_titles), ] } def aggregate_monthly_data(csv_data): """ Pass your `csv_data` as an iterable whose members are individual lines of data (e.g. using a generator returned by the `iter_lines()` method of a `requests` library `Response` object) from a Climate Data Online (CDO)-style CSV file. Your CSV file must include the date (`DATE`), precipitation (`PRCP`), minimum temperature (`TMIN`), and maximum temperature (`TMAX`). The first line of your data file must be a header line. Returns a 12-member list of structured monthly data, each of which is a dict containing `days_of_data`, `precipitation_total`, `min_temperature_total`, and `max_temperature_total`. """ csv_data = csv_reader(csv_data) header_row = next(csv_data) date_index = header_row.index('DATE') prcp_index = header_row.index('PRCP') tmin_index = header_row.index('TMIN') tmax_index = header_row.index('TMAX') monthlies = [dict(days_of_data=0, precipitation_total=0, min_temperature_total=0, max_temperature_total=0) for _ in range(12)] for data_row in csv_data: monthly = monthlies[int(data_row[date_index][4:6]) - 1] monthly['days_of_data'] += 1 monthly['precipitation_total'] += int(data_row[prcp_index]) monthly['min_temperature_total'] += int(data_row[tmin_index]) monthly['max_temperature_total'] += int(data_row[tmax_index]) return monthlies def monthly_total_precip_line(monthlies, append_title=""): """ Given `monthlies` data as returned by `aggregate_monthly_data()`, returns a Pygal line graph of precipitation totals for each month. """ graph = Line(x_labels=MONTH_NAMES, x_label_rotation=90) graph.config.title = "Precipitation" + append_title graph.add("Precip(mm)", [monthly['precipitation_total'] / 10. for monthly in monthlies]) return graph def monthly_avg_min_max_temp_line(monthlies, append_title=""): """ Given `monthlies` data as returned by `aggregate_monthly_data()`, returns a Pygal line graph of average minimum and average maximum temperatures for each month. """ graph = Line(x_labels=MONTH_NAMES, x_label_rotation=90) graph.config.title = "Temperatures" + append_title graph.add("Avg High(C)", [monthly['max_temperature_total'] / 10. / monthly['days_of_data'] for monthly in monthlies]) graph.add("Avg Low(C)", [monthly['min_temperature_total'] / 10. / monthly['days_of_data'] for monthly in monthlies]) return graph
Python
0
@@ -2830,106 +2830,105 @@ ine( -x_labels=MONTH_NAMES, x_label_rotation=90)%0A graph.config.title = %22Precipitation%22 + append_title +title=%22Precipitation%22 + append_title,%0A x_labels=MONTH_NAMES, x_label_rotation=90) %0A%0A @@ -3341,105 +3341,120 @@ ine( -x_labels=MONTH_NAMES, x_label_rotation=90)%0A graph.config.title = %22Temperatures%22 + append_title +title=%22Average High/Low Temperature%22 + append_title,%0A x_labels=MONTH_NAMES, x_label_rotation=90) %0A%0A
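In pygal, passing `title=` to the `Line` constructor and assigning `graph.config.title` afterwards should be equivalent; the constructor form is simply more compact:

from pygal import Line

# Two-step form, as in the old code:
g1 = Line(x_labels=['Jan', 'Feb'], x_label_rotation=90)
g1.config.title = 'Precipitation'

# Constructor form, as in the patched code:
g2 = Line(title='Precipitation', x_labels=['Jan', 'Feb'], x_label_rotation=90)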
e82cfc45585268b5942c169b8a0a61f8110ac832
Change exception handling
ysniff.py
ysniff.py
#!/usr/bin/env python

import boto.rds
import fileinput
import sys
import os
from subprocess import call

mac_index = 12
time_index = 1
start_t_us = 0
start_u_us = 0
MAC_LEN = 17
SAMPLE_PERIOD = 30  # Seconds.
PUSH_TO_AWS_PERIOD = 300  # Seconds.

maclist = set()
buffer = {}

# Function to re-associate with YaleGuest for internet connection
def reconnect():
    call(["sudo","iwconfig","wlan0","essid","YaleGuest"])
    call(["sleep","5"])
    call(["curl", "--data", "\"email=YaleGuest@yale.edu&cmd=cmd\"", "http://10.160.252.249/auth/index.html/u"])

try:
    print "Connecting to boto"
    conn=boto.connect_sdb()
    print "Getting SimpleDB domain"
    domain=conn.get_domain('tmp_ysniff')
except Error as e:
    print e
    print "Attempting to connect to YaleGuest..."
    reconnect()
    print "Connecting to boto"
    conn=boto.connect_sdb()
    print "Getting SimpleDB domain"
    domain=conn.get_domain('tmp_ysniff')

print "Reading from tcpdump"
for line in fileinput.input():
    splitline = line.split(" ")
    if mac_index < len(splitline):
        mac = splitline[mac_index]
        if mac == "DA:Broadcast":
            mac = splitline[mac_index+1]
        ts = int(splitline[time_index][:-2])
        mac = mac[len(mac)-MAC_LEN:]

        # Make list of timestamps for each mac
        if mac not in buffer:
            buffer[mac]=[]

        # Only pair timestamp to mac address once
        if start_t_us not in buffer[mac]:
            buffer[mac].append(start_t_us)

        # Update start_t_us every SAMPLE_PERIOD
        if start_t_us is 0 or ts - start_t_us > (SAMPLE_PERIOD * 1000000):
            start_t_us = ts

        # upload buffer to AWS every PUSH_TO_AWS_PERIOD
        if start_u_us is 0:
            start_u_us = ts
        elif ts - start_u_us > (PUSH_TO_AWS_PERIOD * 1000000):
            for key in buffer:
                try:
                    item = domain.get_item(key)
                except:
                    reconnect()
                    item = domain.get_item(key)
                for timestamp in buffer[key]:
                    item[timestamp] = os.environ['PI_LOCATION']
                try:
                    item.save()
                except:
                    reconnect()
                    item.save()
            buffer = {}
            start_t_us = ts
            #print buffer, len(buffer)
Python
0.000001
@@ -698,12 +698,16 @@ pt E -rror +xception as
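The original `except Error as e` would itself crash with a NameError, because no `Error` name is defined in that module; catching the built-in `Exception` is the minimal fix. A tiny illustration:

def connect():
    raise IOError("simulated boto failure")

try:
    connect()
except Exception as e:  # 'except Error' would raise NameError: name 'Error' is not defined
    print("Recovered from: %s" % e)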
bdb8d48e0030474a616ec2e7e6d5f19132bb18e7
Fix account init
module/plugins/accounts/XFileSharingPro.py
module/plugins/accounts/XFileSharingPro.py
# -*- coding: utf-8 -*-

import re

from module.plugins.internal.XFSPAccount import XFSPAccount


class XFileSharingPro(XFSPAccount):
    __name__ = "XFileSharingPro"
    __type__ = "account"
    __version__ = "0.03"

    __description__ = """XFileSharingPro multi-purpose account plugin"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]

    HOSTER_NAME = None


    def loadAccountInfo(self, user, req):
        return super(XFileSharingPro if self.HOSTER_NAME else XFSPAccount, self).loadAccountInfo(user, req)


    def login(self, user, data, req):
        if self.HOSTER_NAME:
            return super(XFileSharingPro, self).login(user, data, req)
Python
0
@@ -22,19 +22,8 @@ *-%0A%0A -import re%0A%0A from @@ -202,17 +202,17 @@ _ = %220.0 -3 +4 %22%0A%0A _ @@ -375,16 +375,16 @@ om%22)%5D%0A%0A%0A - HOST @@ -400,16 +400,122 @@ None%0A%0A%0A + def init(self):%0A if self.HOSTER_NAME:%0A return super(XFileSharingPro, self).init()%0A%0A%0A def
c20118536c397daac0f1c6eeffeeefd663571f40
fix sqlcreate
daiquiri/core/management/commands/sqlcreate.py
daiquiri/core/management/commands/sqlcreate.py
import socket

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):

    requires_system_checks = False
    can_import_settings = True

    def add_arguments(self, parser):
        parser.add_argument('--schema', help='Show commands for a science schema.')
        parser.add_argument('--test', action='store_true', help='Show commands for the test databases.')

    def get_config(self, key):
        config = settings.DATABASES.get(key)
        if config:
            if 'HOST' not in config or not config['HOST']:
                config['HOST'] = 'localhost'
                config['CLIENT'] = 'localhost'
            else:
                config['CLIENT'] = socket.gethostname()

            config['PREFIX'] = settings.QUERY_USER_SCHEMA_PREFIX
            config['TAP_SCHEMA'] = settings.TAP_SCHEMA
            config['TEST_NAME'] = 'test_%(NAME)s' % config
        return config

    def handle(self, *args, **options):
        config = {key: self.get_config(key) for key in ['default', 'data']}

        print('')

        if options['schema']:
            if 'data' in config:
                if config['data']['ENGINE'] == 'django.db.backends.mysql':
                    config['data'].update({'SCHEMA_NAME': options['schema']})
                    print('''-- Run the following commands on \'%(HOST)s\':
GRANT SELECT ON `%(SCHEMA_NAME)s`.* TO \'%(USER)s\'@\'%(CLIENT)s\';
''' % config['data'])

                elif config['data']['ENGINE'] == 'django.db.backends.postgresql':
                    config['data'].update({'SCHEMA_NAME': options['schema']})
                    print('''-- Run the following commands on %(HOST)s:
\c %(NAME)s
GRANT USAGE ON SCHEMA %(SCHEMA_NAME)s TO %(USER)s;
GRANT SELECT ON ALL TABLES IN SCHEMA %(SCHEMA_NAME)s TO %(USER)s;
''' % config['data'])

            else:
                raise RuntimeError('No \'data\' database connection configured.')

        elif options['test']:
            if 'default' in config:
                if config['default']['ENGINE'] == 'django.db.backends.mysql':
                    print('''-- For testing, run the following commands on \'%(HOST)s\':
CREATE DATABASE `%(TEST_NAME)s`;
GRANT ALL PRIVILEGES ON `%(TEST_NAME)s`.* to \'%(USER)s\'@\'%(CLIENT)s\';
''' % config['default'])

                elif config['default']['ENGINE'] == 'django.db.backends.postgresql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE DATABASE %(TEST_NAME)s WITH OWNER %(USER)s;
''' % config['default'])

            if 'data' in config:
                if config['data']['ENGINE'] == 'django.db.backends.mysql':
                    print('''-- For testing, run the following commands on \'%(HOST)s\':
CREATE DATABASE `%(TEST_NAME)s`;
GRANT ALL PRIVILEGES ON `%(TEST_NAME)s`.* to \'%(USER)s\'@\'%(CLIENT)s\';
''' % config['data'])

                elif config['data']['ENGINE'] == 'django.db.backends.postgresql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE DATABASE %(TEST_NAME)s WITH OWNER %(USER)s;
\c %(TEST_NAME)s
CREATE SCHEMA %(TAP_SCHEMA)s AUTHORIZATION %(USER)s;
''' % config['data'])

        else:
            if 'default' in config:
                if config['default']['ENGINE'] == 'django.db.backends.mysql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE USER \'%(USER)s\'@\'%(CLIENT)s\' identified by \'%(PASSWORD)s\';
CREATE DATABASE `%(NAME)s`;
GRANT ALL PRIVILEGES ON `%(NAME)s`.* to \'%(USER)s\'@\'%(CLIENT)s\';
''' % config['default'])

                elif config['default']['ENGINE'] == 'django.db.backends.postgresql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE USER %(USER)s WITH PASSWORD \'%(PASSWORD)s\';
CREATE DATABASE %(NAME)s WITH OWNER %(USER)s;
''' % config['default'])

            if 'data' in config:
                if config['data']['ENGINE'] == 'django.db.backends.mysql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE USER \'%(USER)s\'@\'%(CLIENT)s\' identified by \'%(PASSWORD)s\';
GRANT ALL PRIVILEGES ON `%(TAP_SCHEMA)s`.* to \'%(USER)s\'@\'%(CLIENT)s\';
GRANT ALL PRIVILEGES ON `%(PREFIX)s%%`.* to \'%(USER)s\'@\'%(CLIENT)s\';
''' % config['data'])

                elif config['data']['ENGINE'] == 'django.db.backends.postgresql':
                    print('''-- Run the following commands on \'%(HOST)s\':
CREATE USER %(USER)s WITH PASSWORD \'%(PASSWORD)s\';
CREATE DATABASE %(NAME)s WITH OWNER %(USER)s;
\c %(NAME)s
CREATE SCHEMA %(TAP_SCHEMA)s AUTHORIZATION %(USER)s;
''' % config['data'])
Python
0.000095
@@ -1125,32 +1125,30 @@ if -'data' in config +config%5B'data'%5D :%0A @@ -1982,32 +1982,39 @@ %0A if +config%5B 'default' in con @@ -2002,34 +2002,25 @@ ig%5B'default' - in config +%5D :%0A @@ -2562,32 +2562,30 @@ if -'data' in config +config%5B'data'%5D :%0A @@ -3203,16 +3203,23 @@ if +config%5B 'default @@ -3219,26 +3219,17 @@ default' - in config +%5D :%0A @@ -3876,24 +3876,22 @@ if -'data' in config +config%5B'data'%5D :%0A
ee931a528a1483bedc2951dd202f369460c0fec4
Update version
bottle_utils/__init__.py
bottle_utils/__init__.py
__version__ = '0.3.4'
__author__ = 'Outernet Inc <hello@outernet.is>'
Python
0
@@ -16,9 +16,9 @@ 0.3. -4 +5 '%0A__
9a914ff1d1a40416fc890a1b90b7d017d214ff2c
Revert "Update __init__.py"
salt/wheel/__init__.py
salt/wheel/__init__.py
# -*- coding: utf-8 -*-
'''
Modules used to control the master itself
'''
from __future__ import absolute_import

#import python libs
import os
import collections

# Import salt libs
from salt import syspaths
import salt.config
import salt.loader
from salt.client import mixins
from salt.utils.error import raise_error


class WheelClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object):
    '''
    An interface to Salt's wheel modules

    :ref:`Wheel modules <all-salt.wheel>` interact with various parts of the
    Salt Master.

    Importing and using ``WheelClient`` must be done on the same machine as the
    Salt Master and it must be done using the same user that the Salt Master is
    running as. Unless :conf_master:`external_auth` is configured and the user
    is authorized to execute wheel functions: (``@wheel``).

    Usage:

    .. code-block:: python

        import salt.config
        import salt.wheel
        opts = salt.config.master_config('/etc/salt/master')
        wheel = salt.wheel.WheelClient(opts)
    '''
    client = 'wheel'
    tag_prefix = 'wheel'

    def __init__(self, opts=None):
        self.opts = opts
        self.functions = salt.loader.wheels(opts)

    # TODO: remove/deprecate
    def call_func(self, fun, **kwargs):
        '''
        Backwards compatibility
        '''
        return self.low(fun, kwargs)

    # TODO: Inconsistent with runner client-- the runner client's master_call gives
    # an async return, unlike this
    def master_call(self, **kwargs):
        '''
        Execute a wheel function through the master network interface (eauth).
        '''
        load = kwargs
        load['cmd'] = 'wheel'
        master_uri = 'tcp://' + salt.utils.ip_bracket(self.opts['interface']) + \
                     ':' + str(self.opts['ret_port'])
        channel = salt.transport.Channel.factory(self.opts,
                                                 crypt='clear',
                                                 master_uri=master_uri)
        ret = channel.send(load)
        if isinstance(ret, collections.Mapping):
            if 'error' in ret:
                raise_error(**ret['error'])
        return ret

    def cmd_sync(self, low, timeout=None):
        '''
        Execute a wheel function synchronously; eauth is respected

        This function requires that :conf_master:`external_auth` is configured
        and the user is authorized to execute runner functions: (``@wheel``).

        .. code-block:: python

            >>> wheel.cmd_sync({
            'fun': 'key.finger',
            'match': 'jerry',
            'eauth': 'auto',
            'username': 'saltdev',
            'password': 'saltdev',
            })
            {'minions': {'jerry': '5d:f6:79:43:5e:d4:42:3f:57:b8:45:a8:7e:a4:6e:ca'}}
        '''
        return self.master_call(**low)

    # TODO: Inconsistent with runner client-- that one uses the master_call function
    # and runs within the master daemon. Need to pick one...
    def cmd_async(self, low):
        '''
        Execute a function asynchronously; eauth is respected

        This function requires that :conf_master:`external_auth` is configured
        and the user is authorized

        .. code-block:: python

            >>> wheel.cmd_async({
            'fun': 'key.finger',
            'match': 'jerry',
            'eauth': 'auto',
            'username': 'saltdev',
            'password': 'saltdev',
            })
            {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'}
        '''
        fun = low.pop('fun')
        return self.async(fun, low)

    def cmd(self, fun, arg=None, pub_data=None, kwarg=None):
        '''
        Execute a function

        .. code-block:: python

            >>> wheel.cmd('key.finger', ['jerry'])
            {'minions': {'jerry': '5d:f6:79:43:5e:d4:42:3f:57:b8:45:a8:7e:a4:6e:ca'}}
        '''
        return self.low(fun, kwarg)


Wheel = WheelClient  # for backward-compat
Python
0
@@ -3955,43 +3955,8 @@ '''%0A - return self.low(fun, kwarg) %0A%0AWh
595f2dfab6841555df27bd145a871b717a4c4c49
handle not having any siblings
cvserver.py
cvserver.py
# coding: utf-8
from __future__ import print_function
from __future__ import unicode_literals

import requests
import bs4
import re

DEBUG = False


def response_for_image(image_url, client_name):
    base_url = 'http://deeplearning.cs.toronto.edu/api/url.php'
    files = {
        'urllink': ('', image_url),
        'url-2txt': ('', '')
    }
    headers = {
        'connection': 'keep-alive',
        'X-Requested-With': 'XMLHttpRequest',
        'User-agent': "@interesting_jpg %s v. 1.0" % client_name
    }

    r = requests.post(base_url, files=files, headers=headers)
    text = r.text.strip()
    if not len(text):
        print('no text in response. status: %d %s' % (r.status_code, r.reason))
        return None
    return text


def nearest_neighbour(raw_text):
    if raw_text:
        soup = bs4.BeautifulSoup(raw_text, 'html.parser')
        try:
            return soup.li.get_text()
        except AttributeError as err:
            print(err)
            print(soup.prettify())
    return None


def captions(raw_text):
    soup = bs4.BeautifulSoup(raw_text)
    header = soup.find('h4', text=re.compile(r'Top'))
    if not header:
        print('error parsing text')
        print(soup.prettify())
        return

    if DEBUG:
        print(header.find_next_sibling().prettify())

    captions = header.find_next_sibling().find_all('li')
    return [c.text for c in captions]


def top_caption(raw_text):
    all_captions = captions(raw_text)
    if DEBUG:
        print(all_captions)
    return all_captions[0]


def main():
    sample_response = """<img id="result-img" src="../tmpfiles/20150107-10:35:13.jpg" height="300"/><h4>TAGS:</h4><h4>&nbsp;&nbsp;cycler&nbsp;&nbsp;peddler&nbsp;&nbsp;salesman&nbsp;&nbsp;rucksack&nbsp;&nbsp;pedicab&nbsp;&nbsp;</h4><br/><h4>Nearest Neighbor Sentence:</h4><ul><li>a woman outside with an umbrella riding a motor cart .</li></ul><br/><h4>Top-5 Generated:</h4><ul><li>two men are wearing a hat , riding on a bicycle with a backpack .</li><li>a man in a cart filled with bikes .</li><li>a man wearing a hat while trying to ride a bicycle on a bike .</li><li>a man riding a bicycle with a cart attached .</li><li>a man wearing a hat on a bicycle and carrying a cart . </li></ul>"""
    print("\n".join(captions(sample_response)))
    print(top_caption(sample_response))


if __name__ == "__main__":
    main()
Python
0.000003
@@ -1306,24 +1306,24 @@ ())%0A -captions +next_sib = heade @@ -1347,24 +1347,30 @@ ng() -.find_all('li')%0A +%0A if next_sib:%0A @@ -1397,17 +1397,107 @@ in -captions%5D +next_sib.findall('li')%5D%0A else:%0A print(%22no headers found?%22)%0A print(soup.prettify()) %0A%0A%0Ad
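The crash being handled comes from BeautifulSoup's `find_next_sibling()` returning `None` when the header has no following element; guarding before dereferencing avoids the AttributeError (note the patch calls `findall`, while BeautifulSoup's method is spelled `find_all`). A sketch:

import bs4

soup = bs4.BeautifulSoup('<h4>Top-5 Generated:</h4>', 'html.parser')
header = soup.find('h4')
next_sib = header.find_next_sibling()  # None: the h4 has no sibling here
if next_sib:
    items = [li.text for li in next_sib.find_all('li')]
else:
    items = []
print(items)  # []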
a806272275fa0071abf038ceed913995c5e99bb5
add support for IAM roles
z3/get.py
z3/get.py
import argparse
import sys

import boto.s3

from z3.config import get_config


def download(bucket, name):
    key = bucket.get_key(name)
    key.get_contents_to_file(sys.stdout)


def main():
    cfg = get_config()
    parser = argparse.ArgumentParser(
        description='Read a key from s3 and write the content to stdout',
    )
    parser.add_argument('name', help='name of S3 key')
    args = parser.parse_args()
    bucket = boto.connect_s3(
        cfg['S3_KEY_ID'], cfg['S3_SECRET']).get_bucket(cfg['BUCKET'])
    download(bucket, args.name)


if __name__ == '__main__':
    main()
Python
0
@@ -19,16 +19,20 @@ port sys +, re %0A%0Aimport @@ -40,149 +40,119 @@ boto -.s3%0A%0Afrom z3.config import get_config%0A%0A%0Adef download(bucket, name):%0A key = bucket.get_key(name)%0A key.get_contents_to_file(sys.stdout)%0A%0A +3%0Aimport botocore%0Afrom boto3.s3.transfer import TransferConfig%0Afrom z3.config import get_config%0A%0AMB = 1024 ** 2 %0Adef @@ -395,135 +395,586 @@ -bucket = boto.connect_s3(%0A cfg%5B'S3_KEY_ID'%5D, cfg%5B'S3_SECRET'%5D).get_bucket(cfg%5B'BUCKET'%5D)%0A download(bucket, args.name) +config = TransferConfig(max_concurrency=int(cfg%5B'CONCURRENCY'%5D), multipart_chunksize=int(re.sub('M', '', cfg%5B'CHUNK_SIZE'%5D)) * MB)%0A if 'S3_KEY_ID' in cfg:%0A s3 = boto3.client('s3'), aws_access_key_id=cfg%5B'S3_KEY_ID'%5D, aws_secret_access_key=cfg%5B'S3_SECRET'%5D)%0A else:%0A s3 = boto3.client('s3')%0A try:%0A s3.download_fileobj(cfg%5B'BUCKET'%5D, args.name, sys.stdout, Config = config)%0A except botocore.exceptions.ClientError as e:%0A if e.response%5B'Error'%5D%5B'Code'%5D == %22404%22:%0A print(%22The object does not exist.%22)%0A else:%0A raise %0A%0Aif
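boto3 resolves credentials through a chain — explicit arguments, environment variables, shared config files, then the instance's IAM role — so constructing the client without keys is precisely what enables IAM roles. A sketch of the branching the diff aims for (without its stray parenthesis):

import boto3

def make_s3_client(cfg):
    if 'S3_KEY_ID' in cfg:
        # Explicit credentials from the config take precedence.
        return boto3.client('s3',
                            aws_access_key_id=cfg['S3_KEY_ID'],
                            aws_secret_access_key=cfg['S3_SECRET'])
    # No keys given: boto3 falls through to env vars, ~/.aws, or the IAM role.
    return boto3.client('s3')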
88544bedd046e642d0780cd9aede3141a03d2f90
change arg parser around - part 3 ... can run a single test suite now
ott/loader/otp/preflight/test_runner.py
ott/loader/otp/preflight/test_runner.py
import os
import sys
import time
import inspect

import logging
log = logging.getLogger(__file__)

from mako.template import Template

from ott.utils.config_util import ConfigUtil
from ott.utils import web_utils
from ott.utils import file_utils
from ott.utils import otp_utils

from ott.loader.otp.preflight.test_suite import ListTestSuites


def get_args_parser():
    parser = otp_utils.get_initial_arg_parser()
    parser.add_argument('--hostname', '-hn', help="specify the hostname for the test url")
    parser.add_argument('--port', '-p', help="port")
    parser.add_argument('--ws_path', '-ws', help="OTP url path, ala 'prod' or '/otp/routers/default/plan'")
    parser.add_argument('--debug', '-d', help="run DEBUG suites", action='store_true')
    return parser


class TestRunner(object):
    """ Run .csv tests from ./tests/ by constructing a url to the trip planner,
        calling the url, then printing a report
    """
    this_module_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))

    def __init__(self, hostname=None, port=None, ws_path=None, suite_dir=None, report_mako_path=None, date=None, filter=None):
        """constructor builds the test runner
        """
        # step 1: build OTP ws and map urls from config
        self.ws_url, self.map_url = otp_utils.get_test_urls_from_config(hostname=hostname, port=port, ws_path=ws_path)

        # step 2: set file and directory paths (suites dir contains .csv files defining tests)
        if suite_dir is None:
            suite_dir = os.path.join(self.this_module_dir, "suites")
        self.report_mako_path = report_mako_path
        if report_mako_path is None:
            self.report_mako_path = os.path.join(self.this_module_dir, 'templates', 'good_bad.html')

        # step 3: create mako template, and list of test suites
        self.report_template = Template(filename=self.report_mako_path)
        self.test_suites = ListTestSuites(ws_url=self.ws_url, map_url=self.map_url, suite_dir=suite_dir, date=date, filter=filter)

    def report(self, dir=None, report_name='otp_report.html'):
        """ render a test pass/fail report with mako
        """
        ret_val = None
        try:
            # step 1: mako render of the report
            #import pdb; pdb.set_trace()
            suites = self.test_suites.get_suites()
            r = self.report_template.render(test_suites=suites, test_errors=self.test_suites.has_errors())
            ret_val = r

            # step 2: stream the report to a file
            report_path = report_name
            if dir:
                report_path = os.path.join(dir, report_name)
            file_utils.mv(report_path, report_path + "-old")
            f = open(report_path, 'w')
            if r:
                f.write(r)
            else:
                f.write("Sorry, the template was null...")
            f.flush()
            f.close()
        except NameError, e:
            log.warn("This ERROR probably means your template has a variable not being sent down with render: {}".format(e))
        except Exception, e:
            log.warn(e)
        return ret_val

    def send_email(self):
        """ send email
        """
        try:
            t = time.strftime('%B %d, %Y (%A) %I:%M%p').lower().replace(" 0", " ")
            m = ""
            p = "PASSED"
            if self.test_suites.has_errors():
                p = "FAILED"
                m = self.test_suites.list_errors()
            msg = "OTP tests {} on {}\n{}\n".format(p, t, m)
            recipients = ConfigUtil(section='contact').get('emails')
            web_utils.simple_email(msg, recipients)
        except Exception, e:
            log.warn(e)

    @classmethod
    def test_graph_factory(cls, args, graph_dir, suite_dir, delay=1):
        ''' run graph tests against whatever server is running
        '''
        ret_val = False
        log.info('GRAPH TESTS: Starting tests!')
        time.sleep(delay)
        t = TestRunner(hostname=args.hostname, port=args.port, ws_path=args.ws_path, suite_dir=suite_dir, filter=args.test_suite)
        t.test_suites.run()
        t.report(graph_dir)
        if t.test_suites.has_errors():
            log.info('GRAPH TESTS: There were errors!')
            t.send_email()
            ret_val = False
        else:
            log.info('GRAPH TESTS: Nope, no errors...')
            ret_val = True
        return ret_val


def main(argv=sys.argv):
    #import pdb; pdb.set_trace()
    parser = get_args_parser()
    args = parser.parse_args()
    dir = None
    if args.debug:
        #log.basicConfig(level=log.DEBUG)
        dir = os.path.join(TestRunner.this_module_dir, "..", "tests", "suites")
    TestRunner.test_graph_factory(graph_dir=dir, suite_dir=dir, hostname=args.hostname, filter=args.test_suite)

if __name__ == '__main__':
    #test_email()
    main()


def test_email():
    #import pdb; pdb.set_trace()
    t = TestRunner()
    t.send_email()
Python
0
@@ -4745,16 +4745,22 @@ factory( +args, graph_di @@ -4783,56 +4783,8 @@ =dir -, hostname=args.hostname, filter=args.test_suite )%0A%0Ai
d4b1852bcbdbfd458209f4e69c5cb7a5c3d9b9c4
Fix schema
marshmallow_peewee/convert.py
marshmallow_peewee/convert.py
import peewee as pw

from marshmallow import fields, validate as ma_validate
from marshmallow.compat import OrderedDict


class Related(fields.Nested):

    def __init__(self, nested=None, meta=None, **kwargs):
        self.meta = meta or {}
        return super(Related, self).__init__(nested, **kwargs)

    def init_model(self, model, name):
        from .schema import ModelSchema

        field = model._meta.fields[name]
        self.meta['model'] = field.rel_model
        meta = type('Meta', (), self.meta)
        self.nested = type('Schema', (ModelSchema,), {'Meta': meta})


class ModelConverter(object):

    """ Convert Peewee model to Marshmallow schema."""

    TYPE_MAPPING = {
        pw.PrimaryKeyField: fields.Integer,
        pw.IntegerField: fields.Integer,
        pw.BigIntegerField: fields.Integer,
        pw.FloatField: fields.Float,
        pw.DoubleField: fields.Float,
        pw.DecimalField: fields.Decimal,
        pw.CharField: fields.String,
        pw.FixedCharField: fields.String,
        pw.TextField: fields.String,
        pw.UUIDField: fields.UUID,
        pw.DateTimeField: fields.DateTime,
        pw.DateField: fields.Date,
        pw.TimeField: fields.Time,
        pw.BooleanField: fields.Boolean,
        pw.ForeignKeyField: fields.Integer,
    }

    def __init__(self, opts):
        self.opts = opts

    def fields_for_model(self, model):
        fields = self.opts.fields
        exclude = self.opts.exclude

        result = OrderedDict()
        for field in model._meta.sorted_fields:
            if fields and field.name not in fields:
                continue
            if exclude and field.name in exclude:
                continue

            for field in [f for f in model._meta.sorted_fields
                          if not fields or f.name in fields]:
                ma_field = self.convert_field(field)
                if ma_field:
                    result[field.name] = ma_field
        return result

    def convert_field(self, field):
        params = {
            'allow_none': field.null,
            'attribute': field.name,
            'default': field.default,
            'required': not field.null and not field.default,
            'validate': field.coerce,
        }
        method = getattr(self, 'convert_' + field.__class__.__name__, self.convert_default)
        return method(field, **params)

    def convert_default(self, field, **params):
        """Return raw field."""
        ma_field = self.TYPE_MAPPING.get(type(field), fields.Raw)
        return ma_field(**params)

    def convert_PrimaryKeyField(self, field, **params):
        dump_only = self.opts.dump_only_pk
        return fields.Integer(dump_only=dump_only, **params)

    def convert_CharField(self, field, validate=None, **params):
        validate = ma_validate.Length(max=field.max_length)
        return fields.String(validate=validate, **params)

    def convert_ForeignKeyField(self, field, attribute=None, **params):
        return fields.Integer(attribute=field.db_column, **params)
Python
0.000004
@@ -2056,46 +2056,8 @@ me,%0A - 'default': field.default,%0A @@ -2162,16 +2162,134 @@ %7D%0A +%0A if field.default is not None and not callable(field.default):%0A params%5B'default'%5D = field.default%0A%0A
e8d71eea391a002bdaa6cf923b3858b14aec1b7f
Update and add VP9 Profile 2
resources/lib/services/msl/profiles.py
resources/lib/services/msl/profiles.py
# -*- coding: utf-8 -*-
"""MSL video profiles"""
from __future__ import unicode_literals

import xbmcaddon

from resources.lib.globals import g
import resources.lib.common as common

HEVC = 'hevc-main-'
HEVC_M10 = 'hevc-main10-'
CENC_PRK = 'dash-cenc-prk'
CENC = 'dash-cenc'
CENC_TL = 'dash-cenc-ctl'
HDR = 'hevc-hdr-main10-'
DV = 'hevc-dv-main10-'
DV5 = 'hevc-dv5-main10-'
VP9 = 'vp9-profile0-'

BASE_LEVELS = ['L30-', 'L31-', 'L40-', 'L41-', 'L50-', 'L51-']
CENC_TL_LEVELS = ['L30-L31-', 'L31-L40-', 'L40-L41-', 'L50-L51-']


def _profile_strings(base, tails):
    """Creates a list of profile strings by concatenating base with all
    permutations of tails"""
    return [base + level + tail[1] for tail in tails for level in tail[0]]


PROFILES = {
    'base': [
        # Audio
        'heaac-2-dash',
        # Unkown
        'BIF240', 'BIF320'],
    'dolbysound': ['ddplus-2.0-dash', 'ddplus-5.1-dash', 'ddplus-5.1hq-dash',
                   'ddplus-atmos-dash'],
    'h264': ['playready-h264mpl30-dash', 'playready-h264mpl31-dash',
             'playready-h264mpl40-dash', 'playready-h264mpl41-dash',
             'playready-h264hpl30-dash', 'playready-h264hpl31-dash'],
    'hevc':
        _profile_strings(base=HEVC,
                         tails=[(BASE_LEVELS, CENC),
                                (CENC_TL_LEVELS, CENC_TL)]) +
        _profile_strings(base=HEVC_M10,
                         tails=[(BASE_LEVELS, CENC),
                                (BASE_LEVELS[:4], CENC_PRK),
                                (CENC_TL_LEVELS, CENC_TL)]),
    'hdr':
        _profile_strings(base=HDR,
                         tails=[(BASE_LEVELS, CENC),
                                (BASE_LEVELS, CENC_PRK)]),
    'dolbyvision':
        _profile_strings(base=DV,
                         tails=[(BASE_LEVELS, CENC)]) +
        _profile_strings(base=DV5,
                         tails=[(BASE_LEVELS, CENC_PRK)]),
    'vp9':
        _profile_strings(base=VP9,
                         tails=[(BASE_LEVELS, CENC)])
}


def enabled_profiles():
    """Return a list of all base and enabled additional profiles"""
    return (PROFILES['base'] +
            PROFILES['h264'] +
            PROFILES['vp9'] +
            _subtitle_profiles() +
            _additional_profiles('dolbysound', 'enable_dolby_sound') +
            _additional_profiles('hevc', 'enable_hevc_profiles') +
            _additional_profiles('hdr',
                                 ['enable_hevc_profiles', 'enable_hdr_profiles']) +
            _additional_profiles('dolbyvision',
                                 ['enable_hevc_profiles', 'enable_dolbyvision_profiles']))


def _subtitle_profiles():
    isversion = xbmcaddon.Addon('inputstream.adaptive').getAddonInfo('version')
    return ['webvtt-lssdh-ios8'
            if common.is_minimum_version(isversion, '2.3.8')
            else 'simplesdh']


def _additional_profiles(profiles, req_settings=None, forb_settings=None):
    return (PROFILES[profiles]
            if (all(g.ADDON.getSettingBool(setting)
                    for setting in common.make_list(req_settings)) and
                not (any(g.ADDON.getSettingBool(setting)
                         for setting in common.make_list(forb_settings))))
            else [])
Python
0
@@ -370,16 +370,25 @@ 10-'%0AVP9 +_PROFILE0 = 'vp9- @@ -397,16 +397,47 @@ ofile0-' +%0AVP9_PROFILE2 = 'vp9-profile2-' %0A%0ABASE_L @@ -833,32 +833,66 @@ 'heaac-2-dash',%0A + 'playready-heaac-2-dash',%0A # Unkown @@ -888,16 +888,17 @@ # Unk +n own%0A @@ -1154,36 +1154,8 @@ sh', - 'playready-h264mpl41-dash', %0A @@ -1214,24 +1214,65 @@ 4hpl31-dash' +,%0A 'playready-h264hpl40-dash' %5D,%0A 'hevc @@ -2009,16 +2009,24 @@ 'vp9 +profile0 ':%0A @@ -2053,16 +2053,25 @@ base=VP9 +_PROFILE0 ,%0A @@ -2117,16 +2117,138 @@ CENC)%5D) +,%0A 'vp9profile2':%0A _profile_strings(base=VP9_PROFILE2,%0A tails=%5B(BASE_LEVELS, CENC_PRK)%5D) %0A%7D%0A%0A%0Adef @@ -2394,24 +2394,24 @@ S%5B'h264'%5D +%0A - @@ -2423,16 +2423,62 @@ LES%5B'vp9 +profile0'%5D +%0A PROFILES%5B'vp9profile2 '%5D +%0A
3f2581aa9f3dddc2e9cf2d79421655d5858bfcbc
Use --ati on ATI cards, or anywhere else you want interactive card choosing
hydrogen.py
hydrogen.py
#!/usr/bin/env python

import pyopencl as cl
import numpy
import sys
from math import *
from time import time
from PIL import Image
from optparse import OptionParser


def independentPsi(n, l):
    """The first term of psi is constant with respect to the entire image,
    so it can be computed outside of the inner loop."""
    rootFirst = (2.0 / n) ** 3
    rootSecond = factorial(n - l - 1.0) / (2.0 * ((n * factorial(n + l)) ** 3))
    return sqrt(rootFirst * rootSecond)


def renderOrbitals((ni, li, mi), imageResolution):
    """Create various buffers to shuttle data to/from the CPU/GPU, then
    execute the kernel for each pixel in the input image, and copy the data
    into a numpy array"""
    res = numpy.int32(imageResolution)
    pointCount = imageResolution ** 2
    n = numpy.int32(ni)
    l = numpy.int32(li)
    m = numpy.int32(mi)

    # Create native buffer and mirror OpenCL buffer
    mf = cl.mem_flags
    output = numpy.zeros(pointCount).astype(numpy.float32)
    outputBuffer = cl.Buffer(main.ctx, mf.READ_WRITE | mf.COPY_HOST_PTR, hostbuf=output)

    # Evaluate first term of psi once for entire image
    ipsi = numpy.float32(independentPsi(n, l))

    # Evaluate the rest of psi once for each pixel, copy into output buffer
    before = time()
    main.prg.density(main.queue, [pointCount], ipsi, n, l, m, outputBuffer, res)
    cl.enqueue_read_buffer(main.queue, outputBuffer, output).wait()
    duration = time() - before
    outputBuffer.release()
    return (output, duration)


def exportImage(output):
    """Export a visual representation of the computed density of the
    wavefunction as a PNG (to /tmp/orbitals.png, for now)"""
    # Linearly scale image data so that brightest pixel is white
    scaleFactor = 255.0 / max(output)
    for i in range(0, len(output)):
        output[i] *= scaleFactor

    # Output PNG of computed orbital density
    res = int(sqrt(len(output)))
    img = Image.new("L", (res, res))
    img.putdata(output)
    img.show()
    img.save("/tmp/orbitals.png", "PNG")


def benchmark(skip, params=(3,2,0)):
    """Time computation of the image at various different resolutions"""
    for res in range(100,1010,skip):
        minDuration = 0
        for itr in range(5):
            (output, duration) = renderOrbitals(params, res)
            if max(output) > 0.0:
                minDuration = min(minDuration, duration)
        # At some resolutions, on my mobile GPU, I get a totally black image
        # I have no idea why this happens, but this sits here to discard
        # those results, since their timing seems to be somewhat inaccurate.
        if minDuration:
            print "{0},{1}".format(res, minDuration)


def main():
    # Parse commandline arguments
    parser = OptionParser(usage="%prog [-b/B] [-c] [-p n,l,m]", version="%prog 0.1")
    parser.add_option("-i", "--individual-bench", action="store_true", default=False,
                      dest="onebench", help="run one benchmark")
    parser.add_option("-b", "--bench", action="store_true", default=False,
                      dest="benchmark", help="run short benchmark")
    parser.add_option("-B", "--long-bench", action="store_true", default=False,
                      dest="longBenchmark", help="run long benchmark")
    parser.add_option("-c", "--cpu", action="store_true", default=False,
                      dest="useCPU", help="run on CPU instead of GPU")
    parser.add_option("-p", None, default="3,2,0",
                      dest="params", help="choose parameters to wavefunction")
    parser.add_option("-r", None, default="400",
                      dest="res", help="choose resolution of output image")
    (options, args) = parser.parse_args()
    params = tuple([int(a) for a in options.params.split(",")])

    #if options.useCPU:
    #    main.ctx = cl.Context(dev_type=cl.device_type.CPU)
    #else:
    #    main.ctx = cl.Context(dev_type=cl.device_type.GPU)
    main.ctx = cl.create_some_context()

    # Output device(s) being used for computation
    if not (options.benchmark or options.longBenchmark or options.onebench):
        print "Running on:"
        for dev in main.ctx.get_info(cl.context_info.DEVICES):
            print "   ",
            print dev.get_info(cl.device_info.VENDOR),
            print dev.get_info(cl.device_info.NAME)
        print

    # Load and compile the OpenCL kernel
    main.queue = cl.CommandQueue(main.ctx)
    kernelFile = open('hydrogen.cl', 'r')
    main.prg = cl.Program(main.ctx, kernelFile.read()).build()
    kernelFile.close()

    # Evaluate psi with the given parameters, in the given mode
    if options.benchmark:
        benchmark(100, params=params)
    elif options.longBenchmark:
        benchmark(10, params=params)
    elif options.onebench:
        (output, duration) = renderOrbitals(params, int(options.res))
        print "{0},{1}".format(options.res, duration)
    else:
        (output, duration) = renderOrbitals(params, int(options.res))
        exportImage(output)
        print duration


if __name__ == "__main__":
    sys.exit(main())
Python
0
@@ -2876,16 +2876,161 @@ g 0.1%22)%0A + parser.add_option(%22-a%22, %22--ati%22, action=%22store_true%22, default=False,%0A dest=%22promptForDevice%22, help=%22prompt for device%22)%0A pars @@ -3999,17 +3999,56 @@ %5D)%0A%0A -# +if not options.promptForDevice:%0A if optio @@ -4058,25 +4058,28 @@ useCPU:%0A -# + main.ctx @@ -4125,17 +4125,20 @@ PU)%0A -# + else:%0A @@ -4139,17 +4139,20 @@ se:%0A -# + main @@ -4198,17 +4198,30 @@ pe.GPU)%0A -%0A + else:%0A main
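`pyopencl.create_some_context()` chooses a device non-interactively when it can (or honors the PYOPENCL_CTX environment variable); passing `interactive=True` forces the platform/device prompt, which is presumably the behavior the new `--ati` flag leans on. A sketch:

import pyopencl as cl

# Automatic choice (or whatever PYOPENCL_CTX selects):
ctx = cl.create_some_context()

# Always prompt to pick among platforms/devices, useful when the
# automatic pick (e.g. on ATI cards) is not the one you want:
ctx = cl.create_some_context(interactive=True)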
d29f175ce5a410044130569ba765bc7c9829c0a6
Add missing import
salt/wheel/__init__.py
salt/wheel/__init__.py
# -*- coding: utf-8 -*-
'''
Modules used to control the master itself
'''
from __future__ import absolute_import

#import python libs
import os
import collections

# Import salt libs
from salt import syspaths
import salt.config
import salt.loader
from salt.client import mixins


class WheelClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object):
    '''
    An interface to Salt's wheel modules

    :ref:`Wheel modules <all-salt.wheel>` interact with various parts of the
    Salt Master.

    Importing and using ``WheelClient`` must be done on the same machine as the
    Salt Master and it must be done using the same user that the Salt Master is
    running as. Unless :conf_master:`external_auth` is configured and the user
    is authorized to execute wheel functions: (``@wheel``).
    '''
    client = 'wheel'
    tag_prefix = 'wheel'

    def __init__(self, opts=None):
        self.opts = opts
        self.functions = salt.loader.wheels(opts)

    # TODO: remove/deprecate
    def call_func(self, fun, **kwargs):
        '''
        Backwards compatibility
        '''
        return self.low(fun, kwargs)

    # TODO: Inconsistent with runner client-- the runner client's master_call gives
    # an async return, unlike this
    def master_call(self, **kwargs):
        '''
        Execute a wheel function through the master network interface (eauth).
        '''
        load = kwargs
        load['cmd'] = 'wheel'
        master_uri = 'tcp://' + salt.utils.ip_bracket(self.opts['interface']) + \
                     ':' + str(self.opts['ret_port'])
        channel = salt.transport.Channel.factory(self.opts,
                                                 crypt='clear',
                                                 master_uri=master_uri)
        ret = channel.send(load)
        if isinstance(ret, collections.Mapping):
            if 'error' in ret:
                raise_error(**ret['error'])
        return ret

    def cmd_sync(self, low, timeout=None):
        '''
        Execute a wheel function synchronously; eauth is respected

        This function requires that :conf_master:`external_auth` is configured
        and the user is authorized to execute runner functions: (``@wheel``).

        .. code-block:: python

            >>> wheel.cmd_sync({
            'fun': 'key.finger',
            'match': 'jerry',
            'eauth': 'auto',
            'username': 'saltdev',
            'password': 'saltdev',
            })
            {'minions': {'jerry': '5d:f6:79:43:5e:d4:42:3f:57:b8:45:a8:7e:a4:6e:ca'}}
        '''
        return self.master_call(**low)

    # TODO: Inconsistent with runner client-- that one uses the master_call function
    # and runs within the master daemon. Need to pick one...
    def cmd_async(self, low):
        '''
        Execute a function asynchronously; eauth is respected

        This function requires that :conf_master:`external_auth` is configured
        and the user is authorized

        .. code-block:: python

            >>> wheel.cmd_async({
            'fun': 'key.finger',
            'match': 'jerry',
            'eauth': 'auto',
            'username': 'saltdev',
            'password': 'saltdev',
            })
            {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'}
        '''
        fun = low.pop('fun')
        return self.async(fun, low)


Wheel = WheelClient  # for backward-compat
Python
0.000466
@@ -270,16 +270,57 @@ mixins%0A +from salt.utils.error import raise_error%0A %0A%0Aclass
5a7887b6a2fcaacb452ee6b2344f29c337478167
Version bump to 0.4.0
cartridge/__init__.py
cartridge/__init__.py
__version__ = "0.3.7"
Python
0
@@ -14,9 +14,9 @@ %220. -3.7 +4.0 %22%0A
3a177a48b1e8f51eb9f13b874879b7fa47216897
Add comments to multimission-simulation
grizli/version.py
grizli/version.py
# Should be one commit behind latest
__version__ = "0.1.1-25-g3109f16"
Python
0
@@ -56,16 +56,16 @@ .1-2 -5-g3109f16 +9-gac73d73 %22%0A
6934ba49ff6f594910843d951606f80db67d9b4b
return to older return
hyml/ext.py
hyml/ext.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) Marko Manninen <elonmedia@gmail.com>, 2017
import hy, hy.importer as hyi
from jinja2.ext import extract_from_ast
import itertools

def extract_from_ast(source, keywords):
    d = None
    def filter_hy(e):
        # basicly we are searching for babel keyword expressions here
        # and when one is found, it is returned along with:
        # 0 linenumber, keyword itself, and message string
        global d
        if isinstance(e, hy.HyExpression) or isinstance(e, list):
            if isinstance(e, hy.HyExpression):
                # this could be the keyword we are searching for
                d = e[0]
            # flatten list, maybe could be done later...
            x = list(itertools.chain(*filter(None, map(filter_hy, e))))
            # reset keyword
            d = None
            return x
        elif not isinstance(e, hy.HySymbol) and isinstance(e, hy.HyString) and d in keywords:
            # no comments available, thus only three items are returned
            # TODO: message context and plural message support
            return 0, str(d), {"context": str(e), "singular": str(e), "plural": str(e)}
    return filter_hy(source)

def chunks(long_list, n):
    # split list to n chunks
    for i in range(0, len(long_list), n):
        t = long_list[i:i + n]
        # add empty keyword list to the tuple for babel
        yield tuple(t[:2]+[t[2]["singular"]]+[[]])

def babel_extract(fileobj, *args, **kw):
    byte = fileobj.read()
    # unfortunately line breaks (line numbers) are lost at this point...
    source = "".join(map(chr, byte))
    if source:
        node = hyi.import_buffer_to_hst(source)
        if node:
            # map keywords to hy symbols for later comparison
            if len(args[0]) > 0:
                keywords = map(hy.HySymbol, args[0])
            else:
                keywords = map(hy.HySymbol, ['ngettext', 'pgettext', 'ungettext', 'dngettext',
                                             'dgettext', 'ugettext', 'gettext', '_', 'N_', 'npgettext'])
            ast = extract_from_ast(node, keywords)
            if ast:
                return chunks(ast, 3)
Python
0.999999
@@ -1269,16 +1269,38 @@ chunks%0A + if long_list:%0A for @@ -1329,24 +1329,28 @@ _list), n):%0A + t = @@ -1376,16 +1376,20 @@ + # add em @@ -1428,16 +1428,20 @@ r babel%0A + @@ -1665,27 +1665,8 @@ e))%0A - if source:%0A @@ -1709,33 +1709,8 @@ ce)%0A - if node:%0A @@ -1759,24 +1759,16 @@ parison%0A - if l @@ -1784,24 +1784,16 @@ %5D) %3E 0:%0A - @@ -1837,30 +1837,14 @@ - else:%0A +else:%0A @@ -1986,24 +1986,16 @@ text'%5D)%0A - ast @@ -2033,40 +2033,8 @@ ds)%0A - if ast:%0A