| commit (string, 40 chars) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars) |
|---|---|---|---|---|---|---|---|
1a3869b97874888f27817eedb840747ba9399d6e
|
Add missing slashes
|
indico/modules/events/papers/blueprint.py
|
indico/modules/events/papers/blueprint.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import current_app, g
from indico.modules.events.papers.controllers import display, management, templates, paper
from indico.web.flask.wrappers import IndicoBlueprint
_bp = IndicoBlueprint('papers', __name__, url_prefix='/event/<confId>', template_folder='templates',
virtual_template_folder='events/papers')
# Display pages
_bp.add_url_rule('/contributions/<int:contrib_id>/paper/submit',
'submit_revision', display.RHSubmitPaper, methods=('GET', 'POST'))
_bp.add_url_rule('/papers/<int:contrib_id>/', 'paper_timeline', display.RHPaperTimeline)
_bp.add_url_rule('/papers/<int:contrib_id>/files/<int:file_id>-<filename>', 'download_file',
display.RHDownloadPaperFile)
_bp.add_url_rule('/papers/templates/<int:template_id>-<filename>', 'download_template',
templates.RHDownloadPaperTemplate)
# Paper reviews
_bp.add_url_rule('papers/<int:contrib_id>/review/type/<review_type>', 'submit_review',
display.RHSubmitPaperReview, methods=('GET', 'POST'))
_bp.add_url_rule('papers/<int:contrib_id>/revision/<int:revision_id>/review/<int:review_id>/edit', 'edit_review',
display.RHEditPaperReview, methods=('GET', 'POST'))
# Paper comments
_bp.add_url_rule('/papers/<int:contrib_id>/comment', 'submit_comment',
display.RHSubmitPaperComment, methods=('POST',))
_bp.add_url_rule('/papers/<int:contrib_id>/revision/<int:revision_id>/comment/<int:comment_id>', 'edit_comment',
display.RHEditPaperComment, methods=('GET', 'POST'))
_bp.add_url_rule('/papers/<int:contrib_id>/revision/<int:revision_id>/comment/<int:comment_id>', 'delete_comment',
display.RHDeletePaperComment, methods=('DELETE',))
# Reviewing area
_bp.add_url_rule('/papers/reviewing/', 'reviewing_area', display.RHReviewingArea)
# Management
_bp.add_url_rule('/manage/papers/', 'management', management.RHPapersDashboard)
_bp.add_url_rule('/manage/papers/deadlines/<any(judge,content_reviewer,layout_reviewer):role>', 'manage_deadline',
management.RHSetDeadline, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/settings', 'manage_reviewing_settings', management.RHManageReviewingSettings,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/templates/', 'manage_templates', templates.RHManagePaperTemplates)
_bp.add_url_rule('/manage/papers/templates/add', 'upload_template',
templates.RHUploadPaperTemplate, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/templates/<int:template_id>-<filename>', 'delete_template',
templates.RHDeletePaperTemplate, methods=('DELETE',))
_bp.add_url_rule('/manage/papers/templates/<int:template_id>-<filename>/edit', 'edit_template',
templates.RHEditPaperTemplate, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/teams/', 'manage_teams', management.RHManagePaperTeams,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/teams/competences', 'manage_competences', management.RHManageCompetences,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/enable/<any(content,layout):reviewing_type>', 'switch',
management.RHSwitchReviewingType, methods=('PUT', 'DELETE'))
# CFP scheduling
_bp.add_url_rule('/manage/papers/schedule', 'schedule_cfp', management.RHScheduleCFP,
methods=('GET', 'POST'))
_bp.add_url_rule('/manage/papers/open', 'open_cfp', management.RHOpenCFP, methods=('POST',))
_bp.add_url_rule('/manage/papers/close', 'close_cfp', management.RHCloseCFP, methods=('POST',))
# URLs available in both management and display areas
# Note: When adding a new one here make sure to specify `defaults=defaults`
# for each rule. Otherwise you may not get the correct one.
for prefix, is_management in (('/manage/papers/assignment-list', True), ('/papers/judging', False)):
defaults = {'management': is_management}
_bp.add_url_rule(prefix + '/', 'papers_list', paper.RHPapersList, defaults=defaults)
_bp.add_url_rule(prefix + '/download', 'download_papers', paper.RHDownloadPapers, methods=('POST',),
defaults=defaults)
_bp.add_url_rule(prefix + '/customize', 'customize_paper_list', paper.RHCustomizePapersList,
methods=('GET', 'POST'), defaults=defaults)
_bp.add_url_rule(prefix + '/judge', 'judge_papers', paper.RHJudgePapers, methods=('GET', 'POST'), defaults=defaults)
_bp.add_url_rule(prefix + '/assign/<any(judge,content_reviewer,layout_reviewer):role>', 'assign_papers',
paper.RHAssignPapers, methods=('POST',), defaults=defaults)
_bp.add_url_rule(prefix + '/unassign/<any(judge,content_reviewer,layout_reviewer):role>', 'unassign_papers',
paper.RHUnassignPapers, methods=('POST',), defaults=defaults)
@_bp.url_defaults
def _add_management_flag(endpoint, values):
if ('management' not in values and
endpoint.split('.')[0] == _bp.name and
current_app.url_map.is_endpoint_expecting(endpoint, 'management')):
# XXX: using getattr because the conference menu builds the url from an
# RH without the management attribute
values['management'] = getattr(g.rh, 'management', False)
# Legacy URLs
_compat_bp = IndicoBlueprint('compat_papers', __name__, url_prefix='/event/<int:confId>')
# TODO...
|
Python
| 0.99999
|
@@ -1667,32 +1667,33 @@
p.add_url_rule('
+/
papers/<int:cont
@@ -1834,16 +1834,17 @@
l_rule('
+/
papers/<
|
f00bb9484dcac9e36aea43779c497fd27e01ec80
|
fix argument name in process_raw
|
raspberryturk/external/data/process_raw.py
|
raspberryturk/external/data/process_raw.py
|
import numpy as np
import cv2
import os
import errno
import shutil
import chess
import hashlib
import argparse
from random import random
from raspberryturk.core.vision.chessboard_frame import ChessboardFrame
from raspberryturk.core.vision.constants import SQUARE_SIZE, BOARD_SIZE
def _create_processed_dir(target_path):
for sub in ['rgb', 'grayscale']:
path = os.path.join(target_path, sub)
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST or not os.path.isdir(path):
raise
def _subdirectories(path):
return [os.path.join(path,o) for o in os.listdir(path) if os.path.isdir(os.path.join(path,o))
and not o.startswith('.')]
def _images(path, cache):
images = []
for o in os.listdir(path):
fn, ext = os.path.splitext(os.path.basename(o))
if o.lower().endswith(('.png', '.jpg', '.jpeg')) and not cache.get(fn, False):
images.append(os.path.join(path,o))
cache[fn] = True
return images
def _rotate(img, angle):
rows,cols = img.shape[:2]
M = cv2.getRotationMatrix2D((cols/2,rows/2),angle,1)
return cv2.warpAffine(img,M,(cols,rows))
def _process(board, img, target_path):
cf = ChessboardFrame(img)
for i in range(64):
piece = board.piece_at(i)
fn = chess.SQUARE_NAMES[i]
sq = cf.square_at(i)
rand_addition = hashlib.sha1(str(random())).hexdigest()[0:8]
for a in range(0, 360, 90):
sym = 'e'
if piece is not None:
sym = piece.symbol()
img_name = "{0}-{1}-{2}-{3}.jpg".format(sym, fn, a, rand_addition)
cv2.imwrite(os.path.join(target_path, 'rgb', img_name), _rotate(sq.raw_img, a))
cv2.imwrite(os.path.join(target_path, 'grayscale', img_name), _rotate(cv2.cvtColor(sq.raw_img, cv2.COLOR_BGR2GRAY), a))
def _get_args():
prog = os.path.relpath(__file__)
desc = "Utility used to process raw chessboard image data collected by collection.py. \
It goes through the contents of BASE_PATH/raw, processes the collected \
data, and stores the processed format in BASE_PATH/processed. By default \
it will keep track of what raw data has been processed so it can be \
run multiple times without reprocessing the same data."
parser = argparse.ArgumentParser(prog=prog, description=desc)
parser.add_argument('target_path', type=os.path.abspath,
help="Source path for data processing.")
parser.add_argument('target_path', type=os.path.abspath,
help="Target path for data processing.")
parser.add_argument('-x', '--ignore_cache', action='store_true', \
help="If True, this will delete all contents \
in the target_path and reprocess everything.")
return parser.parse_args()
def main():
args = _get_args()
target_path = args.target_path
if args.ignore_cache:
shutil.rmtree(target_path)
_create_processed_dir(target_path)
cache = {}
cache_path = os.path.join(target_path, '.board_cache')
if not args.ignore_cache:
try:
cache = {k:True for k in [line.strip() for line in open(cache_path)]}
except IOError:
pass
source_path = args.source_path
seed_dirs = _subdirectories(source_path)
for seed_dir in seed_dirs:
rotation_dirs = _subdirectories(seed_dir)
for rotation_dir in rotation_dirs:
with open(os.path.join(rotation_dir, "board.fen")) as f:
fen = f.read()
board = chess.BaseBoard(board_fen=fen)
for img_path in _images(rotation_dir, cache):
img = cv2.imread(img_path)
_process(board, img, target_path)
with open(cache_path, 'w') as f:
f.write("\n".join(cache.keys()))
if __name__ == '__main__':
main()
|
Python
| 0.999346
|
@@ -2536,38 +2536,38 @@
r.add_argument('
-target
+source
_path', type=os.
|
50d11a45ddbd2b535111de2307a0c6a1b0443577
|
Fix imports
|
motobot/core_plugins/privmsg_handlers.py
|
motobot/core_plugins/privmsg_handlers.py
|
from motobot import hook
from time import strftime, localtime
@hook('PRIVMSG')
def __handle_privmsg(bot, message):
""" Handle the privmsg commands.
Will send the reply back to the channel the command was sent from,
or back to the user whom sent it in the case of a private message.
Commands (prefixed with command_prefix) are executed, CTCP is handled,
and the matches are checked.
"""
response = None
message.message = strip_control_codes(message.message)
target = message.channel \
if is_channel(message.channel) \
else message.nick
if message.message.startswith(bot.command_prefix):
command = message.message.split(' ')[0][len(bot.command_prefix):]
response = bot.commands[command](bot, message, bot.database)
if response is not None:
response = 'PRIVMSG {} :{}'.format(target, response)
elif is_ctcp(message):
response = ctcp_response(message.message[1:-1])
if response is not None:
response = 'NOTICE {} :\u0001{}\u0001'.format(target, response)
else:
for pattern, func in bot.patterns:
if pattern.search(message.message):
response = func(bot, message, bot.database)
if response is not None:
response = 'PRIVMSG {} :{}'.format(target, response)
if response is None:
for sink in bot.sinks:
response = sink(bot, message, bot.database)
if response is not None:
response = 'PRIVMSG {} :{}'.format(target, response)
break
return response
def strip_control_codes(input):
""" Strip the control codes from the input. """
pattern = re.compile(r'\x03[0-9]{0,2},?[0-9]{0,2}|\x02|\x1D|\x1F|\x16|\x0F')
output = pattern.sub('', input)
return output
def is_channel(name):
""" Check if a name is a valid channel name or not. """
valid = ['&', '#', '+', '!']
invalid = [' ', ',', '\u0007']
return (name[0] in valid) and all(c not in invalid for c in name)
def is_ctcp(message):
""" Check if a message object is a ctcp message or not. """
return message.message.startswith('\u0001') and \
message.message.endswith('\u0001')
def ctcp_response(message):
""" Return the appropriate response to a CTCP request. """
mapping = {
'VERSION': 'MotoBot Version 2.0',
'FINGER': 'Oh you dirty man!',
'TIME': strftime('%a %b %d %H:%M:%S', localtime()),
'PING': message
}
return mapping.get(message.split(' ')[0].upper(), None)
|
Python
| 0.000002
|
@@ -55,16 +55,26 @@
caltime
+import re

@hook(
|
cfb7e2a4659d8537b8026c2928c5a635e16070ee
|
Speed up the user admin page
|
apps/auth/admin.py
|
apps/auth/admin.py
|
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from datetime import datetime
from django import forms
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
from models import CustomUser, Announcement
class CustomUserCreationForm(UserCreationForm):
username = forms.RegexField(label=_("Username"), max_length=30, regex=r'^\w+$',
help_text = _("Required. 30 characters or fewer. Alphanumeric characters only (letters, digits and underscores)."),
error_message = _("This value must contain only letters, numbers and underscores."))
password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"), widget=forms.PasswordInput)
email = forms.EmailField(label=_('Email'))
class Meta:
model = CustomUser
fields = ("username", "email")
class CustomUserAdmin(UserAdmin):
add_form = CustomUserCreationForm
list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff',
'is_superuser', 'last_ip', 'partner')
search_fields = ('username', 'first_name', 'last_name', 'email', 'id')
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('username', 'email', 'password1', 'password2')}
),
)
class AnnouncementAdmin(admin.ModelAdmin):
formfield_overrides = {
models.CharField: {'widget': widgets.AdminTextareaWidget}
}
list_display = ('content', 'created', 'visible')
actions = ['make_hidden']
def get_form(self, request, obj=None, **kwargs):
form = super(AnnouncementAdmin, self).get_form(request, obj=None, **kwargs)
default_help_text = form.base_fields['created'].help_text
now = datetime.now()
form.base_fields['created'].help_text = default_help_text+\
u'</br>Current server time is %s. Value is saved without timezone converting.' % now.strftime('%m/%d/%Y %H:%M:%S')
return form
def visible(self, obj):
return not obj.hidden
visible.boolean = True
def make_hidden(self, request, queryset):
Announcement.clear_cache()
queryset.update(hidden=True)
make_hidden.short_description = _(u'Hide')
admin.site.register(Announcement, AnnouncementAdmin)
admin.site.unregister(User)
admin.site.register(CustomUser, CustomUserAdmin)
|
Python
| 0.999004
|
@@ -886,16 +886,71 @@
widgets
+from django.contrib.admin.views.main import ChangeList
from dja
@@ -985,24 +985,24 @@
t UserAdmin
-
from django.
@@ -1875,16 +1875,730 @@
mail")

+class UserChangeList(ChangeList):
+    def get_ordering(self, request, queryset):
+        # The default ChangeList code adds CustomUser.id to the list of
+        # ordering fields to make things deterministic. However this kills
+        # performance because the ORDER BY clause includes columns from 2
+        # different tables (auth_user.username, auth_customuser.id).
+        #
+        # Also, sorting by any column other than user also kills performance
+        # since our user table is quite large at this point.
+        #
+        # So we just override everything and force the sort to be username.
+        # Username is a unique key so the sort will be fast and deterministic.
+        return ['username']
+
class Cu
@@ -3033,15 +3033,95 @@
),
-
)

+    def get_changelist(self, request, **kwargs):
+        return UserChangeList
+
clas
|
b6c529733e5334e90210e46227fd6cd64559180f
|
add support for gcode comments
|
aprinter_encode.py
|
aprinter_encode.py
|
#!/usr/bin/env python2.7
from __future__ import print_function
from __future__ import with_statement
import struct
class GcodeSyntaxError(Exception):
pass
EncodeLineErrors = GcodeSyntaxError
def encode_line(line):
line = line.strip()
if len(line) == 0:
return ''
parts = line.split()
cmd_letter = parts[0][0]
if cmd_letter == 'E':
return chr(0xE0)
if not _letter_ok(cmd_letter):
raise GcodeSyntaxError('invalid command letter')
try:
cmd_number = int(parts[0][1:])
except ValueError:
raise GcodeSyntaxError('invalid command number')
if not (cmd_number >= 0 and cmd_number < 2048):
raise GcodeSyntaxError('invalid command number')
packet_index = ''
packet_payload = ''
num_params = len(parts) - 1
if num_params > 14:
raise GcodeSyntaxError('too many parameters')
for part in parts[1:]:
param_letter = part[0]
if not _letter_ok(param_letter):
raise GcodeSyntaxError('invalid parameter letter')
param_value = part[1:]
if param_value == '':
encode_as = 'void'
else:
encode_as = 'integer'
try:
integer_value = int(param_value)
if not (integer_value >= 0 and integer_value < 2**64):
raise ValueError()
except ValueError:
encode_as = 'real'
try:
real_value = float(param_value)
except ValueError:
raise GcodeSyntaxError('invalid command argument')
if encode_as == 'void':
type_code = 5
param_payload = ''
elif encode_as == 'integer':
if integer_value < 2**32:
type_code = 3
param_payload = struct.pack('<I', integer_value)
else:
type_code = 4
param_payload = struct.pack('<Q', integer_value)
elif encode_as == 'real':
type_code = 1
param_payload = struct.pack('<f', real_value)
packet_index += chr((type_code << 5) + (ord(param_letter) - ord('A')))
packet_payload += param_payload
if (cmd_letter, cmd_number) in _SmallCommands:
command_type_code = _SmallCommands[(cmd_letter, cmd_number)]
packet_header_large = ''
else:
command_type_code = 15
packet_header_large = struct.pack('BB', ((ord(cmd_letter) - ord('A')) << 3) + (cmd_number >> 8), (cmd_number & 0xFF))
packet_header = struct.pack('B', (command_type_code << 4) + num_params) + packet_header_large
packet = packet_header + packet_index + packet_payload
return packet
EncodeFileErrors = (IOError, GcodeSyntaxError)
def encode_file(input_file_name, output_file_name):
line_num = 0
with open(input_file_name, "r") as input_file:
with open(output_file_name, "w") as output_file:
for line in input_file:
line_num += 1
try:
encoded_data = encode_line(line)
except GcodeSyntaxError as e:
e.args = ('line {}: {}'.format(line_num, e.args[0]),)
raise
output_file.write(encoded_data)
output_file.write(chr(0xE0))
_SmallCommands = {
('G', 0) : 1,
('G', 1) : 2,
('G', 92) : 3
}
def _letter_ok(ch):
return (ord(ch) >= ord('A') and ord(ch) <= ord('Z'))
def main():
import argparse
parser = argparse.ArgumentParser(description='G-code packet for APrinter firmware.')
parser.add_argument('--input', required=True)
parser.add_argument('--output', required=True)
args = parser.parse_args()
encode_file(args.input, args.output)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -211,24 +211,122 @@
line(line):
+    comment_index = line.find(';')
+    if comment_index >= 0:
+        line = line[:comment_index]
line = l
|
84509f2c099d156aca530b311b1edb34c7f7fe69
|
version increment
|
module/plugins/hooks/Ev0InFetcher.py
|
module/plugins/hooks/Ev0InFetcher.py
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: mkaay
"""
from module.lib import feedparser
from time import mktime, time
from module.plugins.Hook import Hook
from module.plugins.PluginStorage import PluginStorage
class Ev0InFetcher(Hook, PluginStorage):
__name__ = "Ev0InFetcher"
__version__ = "0.1"
__description__ = """checks rss feeds for ev0.in"""
__config__ = [("activated", "bool", "Activated", "False"),
("interval", "int", "Check interval in minutes", "10"),
("queue", "bool", "Move new shows directly to Queue", False),
("shows", "str", "Shows to check for (comma seperated)", ""),
("quality", "xvid;x264;rmvb", "Video Format", "xvid"),
("hoster", "str", "Hoster to use (comma seperated)", "NetloadIn,RapidshareCom,MegauploadCom,HotfileCom")]
__author_name__ = ("mkaay")
__author_mail__ = ("mkaay@mkaay.de")
def setup(self):
self.interval = self.getConfig("interval") * 60
def filterLinks(self, links):
results = self.core.pluginManager.parseUrls(links)
sortedLinks = {}
for url, hoster in results:
if not sortedLinks.has_key(hoster):
sortedLinks[hoster] = []
sortedLinks[hoster].append(url)
for h in self.getConfig("hoster").split(","):
try:
return sortedLinks[h.strip()]
except:
continue
return []
def periodical(self):
def normalizefiletitle(filename):
filename = filename.replace('.', ' ')
filename = filename.replace('_', ' ')
filename = filename.lower()
return filename
shows = [s.strip() for s in self.getConfig("shows").split(",")]
feed = feedparser.parse("http://feeds.feedburner.com/ev0in/%s?format=xml" % self.getConfig("quality"))
showStorage = {}
for show in shows:
showStorage[show] = int(self.getStorage("show_%s_lastfound" % show, 0))
found = False
for item in feed['items']:
for show, lastfound in showStorage.iteritems():
if show.lower() in normalizefiletitle(item['title']) and lastfound < int(mktime(item.date_parsed)):
links = self.filterLinks(item['description'].split("<br />"))
packagename = item['title'].encode("utf-8")
self.core.log.info("Ev0InFetcher: new episode '%s' (matched '%s')" % (packagename, show))
self.core.server_methods.add_package(packagename, links, queue=(1 if self.getConfig("queue") else 0))
self.setStorage("show_%s_lastfound" % show, int(mktime(item.date_parsed)))
found = True
if not found:
self.core.log.debug("Ev0InFetcher: no new episodes found")
for show, lastfound in showStorage.iteritems():
if lastfound > 0 and lastfound + (3600*24*30) < int(time()):
self.delStorage("show_%s_lastfound" % show)
self.core.log.debug("Ev0InFetcher: cleaned '%s' record" % show)
|
Python
| 0.000002
|
@@ -945,17 +945,17 @@
__ = "0.
-1
+2
"
    __
|
10ce4afb555375e2cf101c8e89b53f398ba67c8c
|
Update FilepostCom.py
|
module/plugins/hoster/FilepostCom.py
|
module/plugins/hoster/FilepostCom.py
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: zoidberg
changelog:
0.27 - 2012-08-12 - hgg
fix "global name 'js_answer' is not defined" bug
fix captcha bug #1 (failed on non-english "captcha wrong" errors)
"""
import re
from time import time
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
from module.plugins.internal.CaptchaService import ReCaptcha
from module.common.json_layer import json_loads
class FilepostCom(SimpleHoster):
__name__ = "FilepostCom"
__type__ = "hoster"
__pattern__ = r'https?://(?:www\.)?(?:filepost\.com/files|fp.io)/([^/]+).*'
__version__ = "0.27"
__description__ = """Filepost.com hoster plugin"""
__author_name__ = "zoidberg"
__author_mail__ = "zoidberg@mujmail.cz"
FILE_INFO_PATTERN = r'<input type="text" id="url" value=\'<a href[^>]*>(?P<N>[^>]+?) - (?P<S>[0-9\.]+ [kKMG]i?B)</a>\' class="inp_text"/>'
#FILE_INFO_PATTERN = r'<h1>(?P<N>[^<]+)</h1>\s*<div class="ul">\s*<ul>\s*<li><span>Size:</span> (?P<S>[0-9.]+) (?P<U>[kKMG])i?B</li>'
FILE_OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>|<div class="file_info file_info_deleted">'
RECAPTCHA_KEY_PATTERN = r"Captcha.init\({\s*key:\s*'([^']+)'"
FLP_TOKEN_PATTERN = r"set_store_options\({token: '([^']+)'"
def handleFree(self):
# Find token and captcha key
file_id = re.match(self.__pattern__, self.pyfile.url).group(1)
found = re.search(self.FLP_TOKEN_PATTERN, self.html)
if not found:
self.parseError("Token")
flp_token = found.group(1)
found = re.search(self.RECAPTCHA_KEY_PATTERN, self.html)
if not found:
self.parseError("Captcha key")
captcha_key = found.group(1)
# Get wait time
get_dict = {'SID': self.req.cj.getCookie('SID'), 'JsHttpRequest': str(int(time() * 10000)) + '-xml'}
post_dict = {'action': 'set_download', 'token': flp_token, 'code': file_id}
wait_time = int(self.getJsonResponse(get_dict, post_dict, 'wait_time'))
if wait_time > 0:
self.wait(wait_time)
post_dict = {"token": flp_token, "code": file_id, "file_pass": ''}
if 'var is_pass_exists = true;' in self.html:
# Solve password
for file_pass in self.getPassword().splitlines():
get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
post_dict['file_pass'] = file_pass
self.logInfo("Password protected link, trying " + file_pass)
download_url = self.getJsonResponse(get_dict, post_dict, 'link')
if download_url:
break
else:
self.fail("No or incorrect password")
else:
# Solve recaptcha
recaptcha = ReCaptcha(self)
for i in xrange(5):
get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml'
if i:
post_dict["recaptcha_challenge_field"], post_dict["recaptcha_response_field"] = recaptcha.challenge(
captcha_key)
self.logDebug(u"RECAPTCHA: %s : %s : %s" % (
captcha_key, post_dict["recaptcha_challenge_field"], post_dict["recaptcha_response_field"]))
download_url = self.getJsonResponse(get_dict, post_dict, 'link')
if download_url:
if i:
self.correctCaptcha()
break
elif i:
self.invalidCaptcha()
else:
self.fail("Invalid captcha")
# Download
self.download(download_url)
def getJsonResponse(self, get_dict, post_dict, field):
json_response = json_loads(self.load('https://filepost.com/files/get/', get=get_dict, post=post_dict))
self.logDebug(json_response)
if not 'js' in json_response:
self.parseError('JSON %s 1' % field)
# i changed js_answer to json_response['js'] since js_answer is nowhere set.
# i don't know the JSON-HTTP specs in detail, but the previous author
# accessed json_response['js']['error'] as well as js_answer['error'].
# see the two lines commented out with "# ~?".
if 'error' in json_response['js']:
if json_response['js']['error'] == 'download_delay':
self.retry(wait_time=json_response['js']['params']['next_download'])
# ~? self.retry(wait_time=js_answer['params']['next_download'])
elif 'Wrong file password' in json_response['js']['error']:
return None
elif 'You entered a wrong CAPTCHA code' in json_response['js']['error']:
return None
elif 'CAPTCHA Code nicht korrekt' in json_response['js']['error']:
return None
elif 'CAPTCHA' in json_response['js']['error']:
self.logDebug('error response is unknown, but mentions CAPTCHA -> return None')
return None
else:
self.fail(json_response['js']['error'])
# ~? self.fail(js_answer['error'])
if not 'answer' in json_response['js'] or not field in json_response['js']['answer']:
self.parseError('JSON %s 2' % field)
return json_response['js']['answer'][field]
getInfo = create_getInfo(FilepostCom)
|
Python
| 0
|
@@ -1291,17 +1291,17 @@
_ = "0.2
-7
+8
"
    __
@@ -1966,16 +1966,141 @@
[^']+)'"
+
+    PREMIUM_ONLY_PATTERN = r'members only. Please upgrade to premium|a premium membership is required to download this file'

    de
|
eff9a7fa2c25739926a8c583c51f30fee66185c9
|
return plugin name changed at loading
|
keystoneauth_oidc_refreshtoken/loading.py
|
keystoneauth_oidc_refreshtoken/loading.py
|
# coding=utf-8
# Copyright 2017 JOSÉ JOAQUÍN ESCOBAR GÓMEZ
# File: loading.py
# Description:
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keystoneauth1 import loading
from keystoneauth1.loading._plugins.identity import v3
from keystoneauth_oidc_refreshtoken import plugin
class OpenIDConnectRefreshToken(v3._OpenIDConnectBase):
@property
def plugin_class(self):
return plugin.v3oidcrefreshtoken
def get_options(self):
options = super(OpenIDConnectRefreshToken, self).get_options()
options.extend([
loading.Opt('refresh_token', required=True,
help='OAuth 2.0 Refresh Token')
])
return options
|
Python
| 0
|
@@ -927,22 +927,20 @@
gin.
-v3oidcr
+OidcR
efresh
-t
+T
oken
|
b978d2a1f2f9cc9942971a6e252ccd1209a9269b
|
remove message (#8163)
|
pytorch_lightning/metrics/__init__.py
|
pytorch_lightning/metrics/__init__.py
|
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pytorch_lightning.metrics.classification import ( # noqa: F401
Accuracy,
AUC,
AUROC,
AveragePrecision,
ConfusionMatrix,
F1,
FBeta,
HammingDistance,
IoU,
Precision,
PrecisionRecallCurve,
Recall,
ROC,
StatScores,
)
from pytorch_lightning.metrics.metric import Metric, MetricCollection # noqa: F401
from pytorch_lightning.metrics.regression import ( # noqa: F401
ExplainedVariance,
MeanAbsoluteError,
MeanSquaredError,
MeanSquaredLogError,
PSNR,
R2Score,
SSIM,
)
from pytorch_lightning.utilities import rank_zero_deprecation
rank_zero_deprecation(
"`pytorch_lightning.metrics.*` module has been renamed to `torchmetrics.*` and split off to its own package"
" (https://github.com/PyTorchLightning/metrics) since v1.3 and will be removed in v1.5"
)
|
Python
| 0
|
@@ -1136,297 +1136,4 @@
,
)
-from pytorch_lightning.utilities import rank_zero_deprecation
-
-rank_zero_deprecation(
-    "`pytorch_lightning.metrics.*` module has been renamed to `torchmetrics.*` and split off to its own package"
-    " (https://github.com/PyTorchLightning/metrics) since v1.3 and will be removed in v1.5"
-)
|
d6b9cc4acb4800aa63cc91957c05c75312a081e5
|
update language_by_size from trunk r9110, add new sq-site
|
pywikibot/families/wikinews_family.py
|
pywikibot/families/wikinews_family.py
|
# -*- coding: utf-8 -*-
from pywikibot import family
__version__ = '$Id$'
# The Wikimedia family that is known as Wikinews
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = 'wikinews'
self.languages_by_size = [
'sr', 'en', 'pl', 'de', 'fr', 'it', 'es', 'pt', 'zh', 'ja', 'sv',
'ru', 'ta', 'fi', 'cs', 'he', 'ro', 'bg', 'ar', 'hu', 'sd', 'tr',
'uk', 'ca', 'fa', 'no', 'bs', 'th', 'ko', 'eo',
]
for lang in self.languages_by_size:
self.langs[lang] = '%s.wikinews.org' % lang
self.obsolete = {
'jp': 'ja',
'nb': 'no',
'nl': None, # https://bugzilla.wikimedia.org/show_bug.cgi?id=20325
'zh-tw': 'zh',
'zh-cn': 'zh'
}
# Which languages have a special order for putting interlanguage links,
# and what order is it? If a language is not in interwiki_putfirst,
# alphabetical order on language code is used. For languages that are in
# interwiki_putfirst, interwiki_putfirst is checked first, and
# languages are put in the order given there. All other languages are put
# after those, in code-alphabetical order.
self.interwiki_putfirst = {
'en': self.alphabetic,
'fi': self.alphabetic,
'fr': self.alphabetic,
'he': ['en'],
'hu': ['en'],
'pl': self.alphabetic,
}
# Global bot allowed languages on http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = ['cs', 'hu',]
# CentralAuth cross avaliable projects.
self.cross_projects = [
'wikipedia', 'wiktionary', 'wikibooks', 'wikiquote', 'wikisource', 'wikiversity',
'meta', 'mediawiki', 'test', 'incubator', 'commons', 'species'
]
def code2encoding(self, code):
return 'utf-8'
def version(self, code):
return '1.17wmf1'
def shared_image_repository(self, code):
return ('commons', 'commons')
|
Python
| 0
|
@@ -456,15 +456,15 @@
', '
-c
+f
a', '
-f
+c
a',
@@ -468,16 +468,22 @@
', 'no',
+ 'sq',
'bs', '
|
06a851590f32acad0bc1e5b0d87cc4b1148b644c
|
Add unique index to patient_numbers
|
radar/radar/models/patient_numbers.py
|
radar/radar/models/patient_numbers.py
|
from sqlalchemy import Column, Integer, ForeignKey, String, Index
from sqlalchemy.orm import relationship
from radar.database import db
from radar.models import MetaModelMixin
from radar.models.common import uuid_pk_column, patient_id_column, patient_relationship
class PatientNumber(db.Model, MetaModelMixin):
__tablename__ = 'patient_numbers'
id = uuid_pk_column()
patient_id = patient_id_column()
patient = patient_relationship('patient_numbers')
data_source_id = Column(Integer, ForeignKey('data_sources.id'), nullable=False)
data_source = relationship('DataSource')
organisation_id = Column(Integer, ForeignKey('organisations.id'), nullable=False)
organisation = relationship('Organisation')
number = Column(String, nullable=False)
# TODO add unique index on data_source_id, organisation_id, number
Index('patient_numbers_patient_id_idx', PatientNumber.patient_id)
Index('patient_numbers_organisation_id_idx', PatientNumber.organisation_id)
|
Python
| 0.998605
|
@@ -781,33 +781,141 @@

#
-TODO add unique index on
+Data source, organisation and number must be unique
+Index('patient_numbers_data_source_id_organisation_id_number_idx', PatientNumber.
data
@@ -926,16 +926,30 @@
rce_id,
+PatientNumber.
organisa
@@ -957,22 +957,50 @@
ion_id,
-number
+PatientNumber.number, unique=True)

Index(
|
bd1a953b579440cfda295ae7a4aa1d5986a558d7
|
replace yield_fixture decorator by fixture
|
myhdl/test/core/test_traceSignals.py
|
myhdl/test/core/test_traceSignals.py
|
# This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2003-2008 Jan Decaluwe
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Run the unit tests for traceSignals """
import os
import random
import pytest
from myhdl import block, Signal, Simulation, _simulator, delay, instance, intbv
from myhdl._traceSignals import TraceSignalsError, _error, traceSignals
from helpers import raises_kind
random.seed(1) # random, but deterministic
path = os.path
QUIET=1
@block
def gen(clk):
@instance
def logic():
while 1:
yield delay(10)
clk.next = not clk
return logic
@block
def fun():
clk = Signal(bool(0))
inst = gen(clk)
return inst
@block
def dummy():
clk = Signal(bool(0))
inst = gen(clk)
return 1
@block
def top():
inst = traceSignals(fun())
return inst
@block
def top2():
inst = [{} for i in range(4)]
j = 3
inst[j-2]['key'] = traceSignals(fun())
return inst
@block
def top3():
inst_1 = traceSignals(fun())
inst_2 = traceSignals(fun())
return inst_1, inst_2
@block
def genTristate(clk, x, y, z):
xd = x.driver()
yd = y.driver()
zd = z.driver()
@instance
def ckgen():
while 1:
yield delay(10)
clk.next = not clk
@instance
def logic():
for v in [True, False, None, 0, True, None, None, 1]:
yield clk.posedge
xd.next = v
if v is None:
yd.next = zd.next = None
elif v:
yd.next = zd.next = 11
else:
yd.next = zd.next = 0
return ckgen,logic
@block
def tristate():
from myhdl import TristateSignal
clk = Signal(bool(0))
x = TristateSignal(True) # single bit
y = TristateSignal(intbv(0)) # intbv with undefined width
z = TristateSignal(intbv(0)[8:]) # intbv with fixed width
inst = genTristate(clk, x, y, z)
return inst
@block
def topTristate():
inst = traceSignals(tristate())
return inst
@pytest.yield_fixture
def vcd_dir(tmpdir):
with tmpdir.as_cwd():
yield tmpdir
if _simulator._tracing:
_simulator._tf.close()
_simulator._tracing = 0
class TestTraceSigs:
# TODO: multiple trace handling is different now has the
# calls go bottom-up. To be revisited.
# def testMultipleTraces(self, vcd_dir):
# with raises_kind(TraceSignalsError, _error.MultipleTraces):
# dut = top3()
def testArgType1(self, vcd_dir):
with raises_kind(TraceSignalsError, _error.ArgType):
dut = traceSignals([1, 2])
# this test is no longer relevant
# def testReturnVal(self, vcd_dir):
# from myhdl import ExtractHierarchyError
# from myhdl._extractHierarchy import _error
# kind = _error.InconsistentToplevel % (2, "dummy")
# with raises_kind(ExtractHierarchyError, kind):
# dut = traceSignals(dummy())
def testHierarchicalTrace1(self, vcd_dir):
p = "%s.vcd" % fun.__name__
top()
assert path.exists(p)
def testHierarchicalTrace2(self, vcd_dir):
pdut = "%s.vcd" % top.__name__
psub = "%s.vcd" % fun.__name__
dut = traceSignals(top())
assert path.exists(pdut)
assert not path.exists(psub)
def testTristateTrace(self, vcd_dir):
sim = Simulation(topTristate())
sim.run(100, quiet=QUIET)
sim.quit()
def testBackupOutputFile(self, vcd_dir):
p = "%s.vcd" % fun.__name__
dut = traceSignals(fun())
sim = Simulation(dut)
sim.run(1000, quiet=QUIET)
sim.quit()
_simulator._tf.close()
_simulator._tracing = 0
size = path.getsize(p)
pbak = p[:-4] + '.' + str(path.getmtime(p)) + '.vcd'
assert not path.exists(pbak)
dut = traceSignals(fun())
_simulator._tf.close()
_simulator._tracing = 0
assert path.exists(p)
assert path.exists(pbak)
assert path.getsize(pbak) == size
assert path.getsize(p) < size
def testSetDirectory(self, vcd_dir):
traceSignals.directory = 'some_vcd_dir'
os.mkdir(path.join(str(vcd_dir), traceSignals.directory))
pdut = "%s.vcd" % top.__name__
psub = "%s.vcd" % fun.__name__
pdutd = path.join(traceSignals.directory, "%s.vcd" % top.__name__)
psubd = path.join(traceSignals.directory, "%s.vcd" % fun.__name__)
dut = traceSignals(top())
_simulator._tf.close()
_simulator._tracing = 0
traceSignals.directory = None
assert not path.exists(pdut)
assert not path.exists(psub)
assert path.exists(pdutd)
assert not path.exists(psubd)
|
Python
| 0
|
@@ -2817,14 +2817,8 @@
est.
-yield_
fixt
|
095ea9ba17c5bb68dcd4e0003864e04077aa3ba3
|
Fix pylint
|
dataset/models/tf/unet.py
|
dataset/models/tf/unet.py
|
"""Contains class for UNet"""
import tensorflow as tf
import numpy as np
from .layers import conv_block
from . import TFModel
class UNet(TFModel):
""" UNet
https://arxiv.org/abs/1505.04597 (O.Ronneberger et al, 2015)
**Configuration**
inputs : dict
dict with keys 'images' and 'masks' (see :meth:`._make_inputs`)
batch_norm : None or dict
parameters for batch normalization layers.
If None, remove batch norm layers whatsoever.
Default is ``{'momentum': 0.1}``.
filters : int
number of filters after the first convolution (64 by default)
num_blocks : int
number of downsampling/upsampling blocks (4 by default)
"""
def _build(self):
names = ['images', 'masks']
_, inputs = self._make_inputs(names)
num_classes = self.num_classes('masks')
data_format = self.data_format('images')
dim = self.spatial_dim('images')
batch_norm = self.get_from_config('batch_norm', {'momentum': 0.1})
filters = self.get_from_config('filters', 64)
num_blocks = self.get_from_config('num_blocks', 4)
kwargs = {'data_format': data_format, 'training': self.is_training}
if batch_norm:
kwargs['batch_norm'] = batch_norm
x = self.input_block(dim, inputs['images'], filters, **kwargs)
layers_filters = 2 ** np.arange(num_blocks) * filters * 2
x = self.body(dim, x, layers_filters, **kwargs)
output = self.head(dim, x, filters, num_classes, **kwargs)
tf.nn.softmax(output, name='predicted_proba')
@classmethod
def body(cls, dim, inputs, filters, **kwargs):
""" UNet body
Parameters
----------
dim : int {1, 2, 3}
input spatial dimensionionaly
inputs : tf.Tensor
input tensor
filters : tuple of int
number of filters in downsampling blocks
name : str
scope name
Return
------
tf.Tensor
"""
encoder_outputs = [inputs]
for i, ifilters in enumerate(filters):
x = cls.downsampling_block(dim, x, ifilters, 'downsampling-'+str(i), **kwargs)
encoder_outputs.append(x)
x = conv_block(dim, x, filters[-1]//2, 2, 't', 'middle', strides=2, **kwargs)
axis = -1 if kwargs['data_format'] == 'channels_last' else 1
for i, ifilters in enumerate(filters[::-1][1:]):
x = tf.concat([encoder_outputs[-i-2], x], axis=axis)
x = cls.upsampling_block(dim, x, ifilters, 'upsampling-'+str(i), **kwargs)
return x
@classmethod
def head(_, dim, inputs, filters, num_classes, **kwargs):
""" Two 3x3 convolutions and 1x1 convolution
Parameters
----------
dim : int {1, 2, 3}
input spatial dimensionionaly
inputs : tf.Tensor
input tensor
filters : int
number of filters in 3x3 convolutions
num_classes : int
number of classes (and number of filters in the last 1x1 convolution)
name : str
scope name
Return
------
tf.Tensor
"""
layout = 'cnacna' if 'batch_norm' in kwargs else 'caca'
x = conv_block(dim, inputs, [filters, filters, num_classes], [3, 3, 1], layout+'c', 'output', **kwargs)
x = tf.identity(x, 'predictions')
return x
@staticmethod
def input_block(dim, inputs, filters, **kwargs):
""" 3x3 convolution
Parameters
----------
dim : int {1, 2, 3}
input spatial dimensionionaly
inputs : tf.Tensor
input tensor
filters : int
number of output filters
name : str
scope name
Return
------
tf.Tensor
"""
layout = 'cnacna' if 'batch_norm' in kwargs else 'caca'
x = conv_block(dim, inputs, filters, 3, layout, 'input', **kwargs)
return x
@staticmethod
def downsampling_block(dim, inputs, filters, name, **kwargs):
""" 2x2 max pooling with stride 2 and two 3x3 convolutions
Parameters
----------
dim : int {1, 2, 3}
input spatial dimensionionaly
inputs : tf.Tensor
input tensor
filters : int
number of output filters
name : str
scope name
Return
------
tf.Tensor
"""
layout = 'pcnacna' if 'batch_norm' in kwargs else 'pcaca'
with tf.variable_scope(name):
x = conv_block(dim, inputs, filters, 3, layout, name, pool_size=2, **kwargs)
return x
@staticmethod
def upsampling_block(dim, inputs, filters, name, **kwargs):
""" 3x3 convolution and 2x2 transposed convolution
Parameters
----------
dim : int {1, 2, 3}
input spatial dimensionionaly
inputs : tf.Tensor
input tensor
filters : int
number of output filters
name : str
scope name
Return
------
tf.Tensor
"""
layout = 'cnacna' if 'batch_norm' in kwargs else 'caca'
with tf.variable_scope(name):
x = conv_block(dim, inputs, 2*filters, 3, layout, 'conv', **kwargs)
x = conv_block(dim, x, filters, 2, 't', 'transposed', strides=2, **kwargs)
return x
|
Python
| 0.000099
|
@@ -2016,32 +2016,51 @@
sor
    """
+        x = inputs
        encoder_
@@ -2070,22 +2070,17 @@
puts = [
-inputs
+x
]
@@ -2664,17 +2664,19 @@
ef head(
-_
+cls
, dim, i
|
c1dae75376ba3293805b2d8a79b08a9138d3bb9c
|
Mark cymru test as expceted fail
|
intelmq/tests/bots/experts/cymru_whois/test_expert.py
|
intelmq/tests/bots/experts/cymru_whois/test_expert.py
|
# -*- coding: utf-8 -*-
import unittest
import intelmq.lib.test as test
from intelmq.bots.experts.cymru_whois.expert import CymruExpertBot
EXAMPLE_INPUT = {"__type": "Event",
"source.ip": "93.184.216.34", # example.com
"time.observation": "2015-01-01T00:00:00+00:00",
}
EXAMPLE_OUTPUT = {"__type": "Event",
"source.ip": "93.184.216.34",
"source.geolocation.cc": "EU",
"source.registry": "RIPE",
"source.network": "93.184.216.0/24",
"source.allocated": "2008-06-02T00:00:00+00:00",
"source.asn": 15133,
"source.as_name": "EDGECAST - MCI Communications Services, Inc. d/b/a Verizon Business, US",
"time.observation": "2015-01-01T00:00:00+00:00",
}
EXAMPLE_INPUT6 = {"__type": "Event",
"destination.ip": "2001:500:88:200::8", # iana.org
"time.observation": "2015-01-01T00:00:00+00:00",
}
EXAMPLE_OUTPUT6 = {"__type": "Event",
"destination.ip": "2001:500:88:200::8", # iana.org
"destination.registry": "ARIN",
"destination.allocated": "2010-02-18T00:00:00+00:00",
"destination.as_name": "ICANN-DC - ICANN, US",
"destination.geolocation.cc": "US",
"time.observation": "2015-01-01T00:00:00+00:00",
"destination.asn": 16876,
"destination.network": "2001:500:88::/48",
}
UNICODE_INPUT = {"__type": "Event",
"destination.ip": "200.236.128.1",
"time.observation": "2015-01-01T00:00:00+00:00",
}
UNICODE_OUTPUT = {"__type": "Event",
"destination.ip": "200.236.128.1",
"time.observation": "2015-01-01T00:00:00+00:00",
"destination.registry": "LACNIC",
"destination.allocated": "2000-02-15T00:00:00+00:00",
"destination.as_name": "Fundação de Desenvolvimento da Pesquisa, BR",
"destination.geolocation.cc": "BR",
"destination.asn": 10417,
"destination.network": "200.236.128.0/18",
}
EMPTY_INPUT = {"__type": "Event",
"source.ip": "198.105.125.77", # no result
"time.observation": "2015-01-01T00:00:00+00:00",
}
NO_ASN_INPUT = {"__type": "Event",
"source.ip": "212.92.127.126",
"time.observation": "2015-01-01T00:00:00+00:00",
}
NO_ASN_OUTPUT = {"__type": "Event",
"source.ip": "212.92.127.126",
"time.observation": "2015-01-01T00:00:00+00:00",
"source.asn": 23456,
"source.geolocation.cc": 'RU',
"source.ip": '212.92.127.126',
"source.network": '212.92.127.0/24',
"source.registry": 'RIPE',
}
@test.skip_redis()
@test.skip_internet()
class TestCymruExpertBot(test.BotTestCase, unittest.TestCase):
"""
A TestCase for AbusixExpertBot.
"""
@classmethod
def set_bot(cls):
cls.bot_reference = CymruExpertBot
cls.use_cache = True
def test_ipv4_lookup(self):
self.input_message = EXAMPLE_INPUT
self.run_bot()
self.assertMessageEqual(0, EXAMPLE_OUTPUT)
def test_ipv6_lookup(self):
self.input_message = EXAMPLE_INPUT6
self.run_bot()
self.assertMessageEqual(0, EXAMPLE_OUTPUT6)
def test_unicode_as_name(self):
self.input_message = UNICODE_INPUT
self.run_bot()
self.assertMessageEqual(0, UNICODE_OUTPUT)
def test_empty_result(self):
self.input_message = EMPTY_INPUT
self.run_bot()
self.assertMessageEqual(0, EMPTY_INPUT)
def test_missing_asn(self):
"""
No information for ASN.
https://github.com/certtools/intelmq/issues/635
"""
self.input_message = NO_ASN_INPUT
self.run_bot()
self.assertMessageEqual(0, NO_ASN_OUTPUT)
if __name__ == '__main__': # pragma: no cover
unittest.main()
|
Python
| 0.000001
|
@@ -3930,24 +3930,54 @@
PTY_INPUT)

+    @unittest.expectedFailure
    def test
|
e44115c6785fcc80f378e61e98e4ccd32cc9498f
|
Renames `update_step()` to `train_step()`.
|
flax/training/train_state.py
|
flax/training/train_state.py
|
# Copyright 2021 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Callable
from flax import core
from flax import struct
import optax
class TrainState(struct.PyTreeNode):
"""Simple train state for the common case with a single Optax optimizer.
Synopsis:
state = TrainState.create(
apply_fn=model.apply,
params=variables['params'],
tx=tx)
grad_fn = jax.grad(make_loss_fn(state.apply_fn))
for batch in data:
grads = grad_fn(state.params, batch)
state = state.apply_gradients(grads=grads)
Note that you can easily extend this dataclass by subclassing it for storing
additional data (e.g. additional variable collections).
For more exotic usecases (e.g. multiple optimizers) it's probably best to
fork the class and modify it.
Attributes:
step: Counter starts at 0 and is incremented by every call to
`.apply_gradients()`.
apply_fn: Usually set to `model.apply()`. Kept in this dataclass for
convenience to have a shorter params list for `update_step()`.
tx: An Optax gradient transformation.
opt_state: The state for `tx`.
"""
step: int
apply_fn: Callable = struct.field(pytree_node=False)
params: core.FrozenDict[str, Any]
tx: optax.GradientTransformation = struct.field(pytree_node=False)
opt_state: optax.OptState
def apply_gradients(self, *, grads, **kwargs):
"""Updates `step`, `params`, `opt_state` and `**kwargs` in return value.
Note that internally this function calls `.tx.update()` followed by a call
to `optax.apply_updates()` to update `params` and `opt_state`.
Args:
grads: Gradients that have the same pytree structure as `.params`.
**kwargs: Additional dataclass attributes that should be `.replace()`-ed.
Returns:
An updated instance of `self` with `step` incremented by one, `params`
and `opt_state` updated by applying `grads`, and additional attributes
replaced as specified by `kwargs`.
"""
updates, new_opt_state = self.tx.update(
grads, self.opt_state, self.params)
new_params = optax.apply_updates(self.params, updates)
return self.replace(
step=self.step + 1,
params=new_params,
opt_state=new_opt_state,
**kwargs,
)
@classmethod
def create(cls, *, apply_fn, params, tx, **kwargs):
"""Creates a new instance with `step=0` and initialized `opt_state`."""
opt_state = tx.init(params)
return cls(
step=0,
apply_fn=apply_fn,
params=params,
tx=tx,
opt_state=opt_state,
**kwargs,
)
|
Python
| 0.999865
|
@@ -1561,23 +1561,63 @@
for
-`update_step()`
+the `train_step()` function
+      in your training loop
.
|
90f903e887fb0cfe9ca68e4dd0bcf811ea35ceb3
|
add dcgan and superresolution
|
scripts/nb_duet_test.py
|
scripts/nb_duet_test.py
|
# For creating Duet tests from notebooks, adapt your notebooks to the following rules:
# 1. The DO/DS notebooks have a predefined format:
# <test_name>_Data_Owner.ipynb and <test_name>_Data_Scientist.ipynb.
#
# 2. Each notebook must have special markdown cells called Checkpoints, with the
# following properties:
# - The cell begins with the text "Checkpoint <int>: "
# used by the script for extracting the step number.
# - Each Checkpoint cell acts as a barrier in the generated scripts.
# - In the DO tests, getting to Checkpoint N means creating a file to mark the
# checkpoint and waiting for the DS instance to get the Checkpoint N as well.
# - In the DS tests, getting to Checkpoint N means creating a file to mark the
# checkpoint and waiting for the DO instance to get to Checkpoint N + 1.
# - The DS notebook instance should always end with a checkpoint cell, to wait for
# the DO instance to assert the results.
# stdlib
from collections import defaultdict
import os
from pathlib import Path
import re
import shutil
# third party
from nbconvert import PythonExporter
from nbconvert.writers import FilesWriter
import nbformat
tests = defaultdict(list)
output_dir = Path("tests/syft/notebooks")
checkpoint_dir = Path("tests/syft/notebooks/checkpoints")
try:
os.mkdir(output_dir)
except BaseException as e:
print("os.mkdir failed ", e)
try:
shutil.rmtree(checkpoint_dir)
except BaseException as e:
print("rmtree failed ", e)
try:
os.mkdir(checkpoint_dir)
except BaseException as e:
print("os.mkdir failed ", e)
for path in list(Path("examples/homomorphic-encryption").rglob("*.ipynb")) + list(
Path("examples/duet/dcgan").rglob("*.ipynb")
):
if ".ipynb_checkpoints" in str(path):
continue
testname = re.sub("[^0-9a-zA-Z]+", "_", str(path))
output = output_dir / testname
file_name = str(path.stem)
is_do = False
is_ds = False
if file_name.endswith("_Data_Scientist"):
testcase = file_name.replace("_Data_Scientist", "")
tests[testcase].append(testname)
is_ds = True
elif file_name.endswith("_Data_Owner"):
testcase = file_name.replace("_Data_Owner", "")
tests[testcase].append(testname)
is_do = True
else:
continue
notebook_nodes = nbformat.read(path, as_version=4)
custom_cell = nbformat.v4.new_code_cell(
source="""
import os
import time
import asyncio
from pathlib import Path
loop = asyncio.get_event_loop()
"""
)
notebook_nodes["cells"].insert(0, custom_cell)
for idx, cell in enumerate(notebook_nodes["cells"]):
if cell["cell_type"] == "code" and "loopback=True" in cell["source"]:
notebook_nodes["cells"][idx]["source"] = cell["source"].replace(
"loopback=True", 'loopback=True, network_url=f"http://127.0.0.1:21000"'
)
if cell["cell_type"] == "markdown" and "Checkpoint" in cell["source"]:
checkpoint = (
cell["source"]
.lower()
.split("checkpoint")[1]
.strip()
.split(":")[0]
.strip()
)
# For DO, we wait until DS gets to the same checkpoint
if is_do:
ck_file = "checkpoints/" + (
testcase + "_DO_checkpoint_" + str(checkpoint)
)
wait_file = "checkpoints/" + (
testcase + "_DS_checkpoint_" + str(checkpoint)
)
checkpoint_cell = nbformat.v4.new_code_cell(
source=f"""
Path(\"{ck_file}\").touch()
for retry in range(360):
if Path(\"{wait_file}\").exists():
break
task = loop.create_task(asyncio.sleep(1))
loop.run_until_complete(task)
"""
)
# For DS, we wait until DO gets to the next checkpoint
elif is_ds:
ck_file = "checkpoints/" + (
testcase + "_DS_checkpoint_" + str(checkpoint)
)
wait_file = "checkpoints/" + (
testcase + "_DO_checkpoint_" + str(int(checkpoint) + 1)
)
checkpoint_cell = nbformat.v4.new_code_cell(
source=f"""
Path(\"{ck_file}\").touch()
for retry in range(360):
if Path(\"{wait_file}\").exists():
break
task = loop.create_task(asyncio.sleep(1))
loop.run_until_complete(task)
assert Path(\"{wait_file}\").exists()
"""
)
notebook_nodes["cells"][idx] = checkpoint_cell
try:
exporter = PythonExporter()
(body, resources) = exporter.from_notebook_node(notebook_nodes)
write_file = FilesWriter()
write_file.write(output=body, resources=resources, notebook_name=str(output))
except Exception as e:
print(f"There was a problem exporting the file(s): {e}")
for case in tests:
test = tests[case]
if len(test) != 2:
print("invalid testcase ", test)
print(case, test)
template = open(output_dir / "duet_test.py.template").read()
output_py = template.replace("{{TESTCASE}}", str(case))
for script in test:
if "Data_Owner" in script:
output_py = output_py.replace("{{DO_SCRIPT}}", script)
elif "Data_Scientist" in script:
output_py = output_py.replace("{{DS_SCRIPT}}", script)
with open(output_dir / f"duet_{case}_test.py", "w") as out_py:
out_py.write(output_py)
|
Python
| 0
|
@@ -1676,16 +1676,20 @@
+ list(
+
Path
@@ -1728,17 +1728,89 @@
.ipynb")
-
+) + list(
+    Path("examples/duet/super_resolution").rglob("*.ipynb")
):
    i
|
889a8d8b2bda4b7702949615bc1dd5fd9f5150c0
|
add is_on_extended_trial_plan to Hubspot user fields
|
corehq/apps/analytics/signals.py
|
corehq/apps/analytics/signals.py
|
from django.contrib.auth.signals import user_logged_in
from corehq.apps.accounting.utils import ensure_domain_instance
from corehq.apps.analytics.tasks import (
track_user_sign_in_on_hubspot,
HUBSPOT_COOKIE,
update_hubspot_properties,
)
from corehq.apps.analytics.utils import get_meta
from corehq.util.decorators import handle_uncaught_exceptions
from .tasks import identify
from django.dispatch import receiver
from corehq.apps.users.models import WebUser, CouchUser
from corehq.apps.accounting.models import (
ProBonoStatus,
SoftwarePlanEdition,
Subscription,
)
from corehq.apps.accounting.signals import subscription_upgrade_or_downgrade
from corehq.apps.domain.signals import commcare_domain_post_save
from corehq.apps.users.signals import couch_user_post_save
@receiver(couch_user_post_save)
def user_save_callback(sender, **kwargs):
couch_user = kwargs.get("couch_user", None)
if couch_user and couch_user.is_web_user():
properties = {}
properties.update(_get_subscription_properties_by_user(couch_user))
properties.update(_get_user_domain_memberships(couch_user))
identify.delay(couch_user.username, properties)
update_hubspot_properties(couch_user, properties)
@receiver(commcare_domain_post_save)
@receiver(subscription_upgrade_or_downgrade)
def domain_save_callback(sender, domain, **kwargs):
domain = ensure_domain_instance(domain)
if domain:
update_subscription_properties_by_domain(domain)
def update_subscription_properties_by_user(couch_user):
properties = _get_subscription_properties_by_user(couch_user)
identify.delay(couch_user.username, properties)
update_hubspot_properties(couch_user, properties)
def _get_subscription_properties_by_user(couch_user):
# Note: using "yes" and "no" instead of True and False because spec calls
# for using these values. (True is just converted to "True" in KISSmetrics)
all_subscriptions = []
subscribed_editions = []
for domain_name in couch_user.domains:
plan_version, subscription = Subscription.get_subscribed_plan_by_domain(domain_name)
subscribed_editions.append(plan_version.plan.edition)
if subscription is not None:
all_subscriptions.append(subscription)
def _is_one_of_editions(edition):
return 'yes' if edition in subscribed_editions else 'no'
def _is_a_pro_bono_status(status):
return 'yes' if status in [s.pro_bono_status for s in all_subscriptions] else 'no'
return {
'is_on_community_plan': _is_one_of_editions(SoftwarePlanEdition.COMMUNITY),
'is_on_standard_plan': _is_one_of_editions(SoftwarePlanEdition.STANDARD),
'is_on_pro_plan': _is_one_of_editions(SoftwarePlanEdition.PRO),
'is_on_advanced_plan': _is_one_of_editions(SoftwarePlanEdition.ADVANCED),
'is_on_enterprise_plan': _is_one_of_editions(SoftwarePlanEdition.ENTERPRISE),
'is_on_pro_bono_or_discounted_plan': _is_a_pro_bono_status(ProBonoStatus.YES),
}
def _get_user_domain_memberships(couch_user):
return {
"number_of_project_spaces": len(couch_user.domains)
}
def update_subscription_properties_by_domain(domain):
affected_users = WebUser.view(
'users/web_users_by_domain', reduce=False, key=domain.name, include_docs=True
).all()
for web_user in affected_users:
update_subscription_properties_by_user(web_user)
@receiver(user_logged_in)
@handle_uncaught_exceptions(mail_admins=True)
def track_user_login(sender, request, user, **kwargs):
couch_user = CouchUser.from_django_user(user)
if couch_user and couch_user.is_web_user():
if not request or HUBSPOT_COOKIE not in request.COOKIES:
# API calls, form submissions etc.
return
meta = get_meta(request)
track_user_sign_in_on_hubspot.delay(couch_user, request.COOKIES, meta)
|
Python
| 0.000001
|
@@ -582,16 +582,38 @@
iption,%0A
+ SubscriptionType,%0A
)%0Afrom c
@@ -2532,24 +2532,214 @@
else 'no'%0A%0A
+ def _is_on_extended_trial():%0A service_types = %5Bs.subscription_type for s in all_subscriptions%5D%0A return 'yes' if SubscriptionType.EXTENDED_TRIAL in service_types else 'no'%0A%0A
return %7B
@@ -3232,16 +3232,78 @@
s.YES),%0A
+ 'is_on_extended_trial_plan': _is_on_extended_trial(),%0A
%7D%0A%0A%0A
|
10a8946a18c953d64648639b5a545ec8fa5da6e8
|
Update print_format.py
|
frappe/utils/print_format.py
|
frappe/utils/print_format.py
|
from __future__ import unicode_literals
import frappe, os, copy, json, re
from frappe import _
from frappe.modules import get_doc_path
from jinja2 import TemplateNotFound
from frappe.utils import cint, strip_html
from frappe.utils.pdf import get_pdf,cleanup
import cups
from PyPDF2 import PdfFileWriter, PdfFileReader
no_cache = 1
no_sitemap = 1
base_template_path = "templates/www/printview.html"
standard_format = "templates/print_formats/standard.html"
@frappe.whitelist()
def download_multi_pdf(doctype, name, format=None):
# name can include names of many docs of the same doctype.
import json
result = json.loads(name)
# Concatenating pdf files
output = PdfFileWriter()
for ss in result:
output = frappe.get_print(doctype, ss, format, as_pdf = True, output = output)
frappe.local.response.filename = "{doctype}.pdf".format(doctype=doctype.replace(" ", "-").replace("/", "-"))
frappe.local.response.filecontent = read_multi_pdf(output)
frappe.local.response.type = "download"
def read_multi_pdf(output):
# Get the content of the merged pdf files
fname = os.path.join("/tmp", "frappe-pdf-{0}.pdf".format(frappe.generate_hash()))
output.write(open(fname,"wb"))
with open(fname, "rb") as fileobj:
filedata = fileobj.read()
return filedata
@frappe.whitelist()
def download_pdf(doctype, name, format=None, doc=None, no_letterhead=0):
html = frappe.get_print(doctype, name, format, doc=doc, no_letterhead=no_letterhead)
frappe.local.response.filename = "{name}.pdf".format(name=name.replace(" ", "-").replace("/", "-"))
frappe.local.response.filecontent = get_pdf(html)
frappe.local.response.type = "download"
@frappe.whitelist()
def report_to_pdf(html, orientation="Landscape"):
frappe.local.response.filename = "report.pdf"
frappe.local.response.filecontent = get_pdf(html, {"orientation": orientation})
frappe.local.response.type = "download"
@frappe.whitelist()
def print_by_server(doctype, name, format=None, doc=None, no_letterhead=0):
print_settings = frappe.get_doc("Print Settings")
try:
cups.setServer(print_settings.server_ip)
cups.setPort(print_settings.port)
conn = cups.Connection()
output = PdfFileWriter()
output = frappe.get_print(doctype, name, format, doc=doc, no_letterhead=no_letterhead, as_pdf = True, output = output)
file = os.path.join("/tmp", "frappe-pdf-{0}.pdf".format(frappe.generate_hash()))
output.write(open(file,"wb"))
conn.printFile("Generic-text-only",file , name, {})
except IOError as e:
if ("ContentNotFoundError" in e.message
or "ContentOperationNotPermittedError" in e.message
or "UnknownContentError" in e.message
or "RemoteHostClosedError" in e.message):
frappe.throw(_("PDF generation failed"))
except cups.IPPError:
frappe.throw(_("Unsupported document-format 'application/pdf'."))
finally:
cleanup(file,{})
|
Python
| 0.000011
|
@@ -2434,27 +2434,35 @@
ile(
-%22Generic-text-only%22
+print_settings.printer_name
,fil
@@ -2769,54 +2769,23 @@
(_(%22
-Unsupported document-format 'application/pdf'.
+Printing failed
%22))%0A
|
86422fd200726ceaa6ffd06cd2d995f1d068efd9
|
input_dropout_ratio must be [0,1)
|
py/testdir_single_jvm/test_NN2_params_rand2.py
|
py/testdir_single_jvm/test_NN2_params_rand2.py
|
import unittest, random, sys, time
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_glm, h2o_import as h2i, h2o_nn
def define_params():
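# Parameter grid for randomized DeepLearning runs; None presumably leaves the option at its h2o default.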
paramDict = {
'destination_key' : [None, 'NN2_model'],
'ignored_cols' : [None, 0, 1, '0,1'],
'classification' : [None, 0, 1],
'validation' : [None, 'covtype.20k.hex'],
# 'mode' : [None, 'SingleNode', 'SingleThread', 'MapReduce'],
'activation' : [None, 'Tanh', 'TanhWithDropout', 'Rectifier', 'RectifierWithDropout',
'Maxout', 'MaxoutWithDropout'],
'input_dropout_ratio' : [None, 0, 1],
'hidden' : [None, 1, '100,50'],
'adaptive_rate' : [None, 0, 1],
'rate' : [None, 0.005, 0.010],
'rate_annealing' : [None, 0, 1e-6, 1e-4],
'momentum_start' : [None, 0, 0.1, 0.5, 0.9999],
'momentum_ramp' : [None, 1, 10000, 1000000],
'momentum_stable' : [None, 0, 0.9, 0.8],
'max_w2' : [None, 5, 10, 'Infinity'],
'l1' : [None, 0, 1e-4],
'l2' : [None, 0, 1e-4, 0.5],
'seed' : [None, 0, 1, 5234234],
'initial_weight_distribution' : [None, 'UniformAdaptive', 'Uniform', 'Normal'],
'initial_weight_scale' : [None, 0, 1],
'rate_decay' : [None, 0, 1],
'epochs' : [0.001, 2],
'score_training_samples' : [None, 0, 1],
'score_validation_samples' : [None, 0, 1],
'score_interval' : [None, 0, 1],
'train_samples_per_iteration' : [None, 0, 1],
'diagnostics' : [None, 0, 1],
'force_load_balance' : [None, 0, 1],
'replicate_training_data' : [None, 0, 1],
'shuffle_training_data' : [None, 0, 1],
'score_duty_cycle' : [None, 0.1, 0.01],
'fast_mode' : [None, 0, 1],
'ignore_const_cols' : [None, 0, 1],
'shuffle_training_data' : [None, 0, 1],
'nesterov_accelerated_gradient': [None, 0, 1],
# 'warmup_samples' : [None, 0, 10],
}
return paramDict
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED, localhost
SEED = h2o.setup_random_seed()
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(node_count=1)
else:
h2o_hosts.build_cloud_with_hosts(node_count=1)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_NN2_params_rand2(self):
h2o.beta_features = True
csvPathname = 'covtype/covtype.20k.data'
hex_key = 'covtype.20k.hex'
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, hex_key=hex_key, schema='put')
paramDict = define_params()
for trial in range(3):
# params is mutable. This is default.
params = {'response': 'C55', 'epochs': '1'}
h2o_nn.pickRandDeepLearningParams(paramDict, params)
kwargs = params.copy()
start = time.time()
nn = h2o_cmd.runDeepLearning(timeoutSecs=500, parseResult=parseResult, **kwargs)
print "nn result:", h2o.dump_json(nn)
h2o.check_sandbox_for_errors()
deeplearning_model = nn['deeplearning_model']
errors = deeplearning_model['errors']
# print "errors", h2o.dump_json(errors)
# print "errors, classification", errors['classification']
# assert 1==0
# unstable = nn['model_info']['unstable']
# unstable case caused by :
# normal initial distribution with amplitude 1 and input_dropout_ratio=1.
# blowing up numerically during propagation of all zeroes as input repeatedly.
# arnon added logging to stdout in addition to html in 7899b92ad67.
# Will have to check that first before making predictions.
# print "unstable:", unstable
# FIX! simple check?
print "Deep Learning end on ", csvPathname, 'took', time.time() - start, 'seconds'
print "Trial #", trial, "completed\n"
if __name__ == '__main__':
h2o.unit_main()
|
Python
| 0.999999
|
@@ -743,35 +743,57 @@
: %5BNone, 0,
-1%5D,
+0.5, .99%5D, # 1 is illegal
%0A 'hidden
|
ab2d635f6f52c6cbc6c59d3fa887176852e186ff
|
Move Ko-Fi notifications to private channels.
|
KofiFriend_Brain.py
|
KofiFriend_Brain.py
|
import traceback
import json
import util_functions
from discord.ext import commands
import discord
import sys
import re
import os
import asyncio
from aiohttp import web
import datetime
botToken = os.environ.get('botToken')
def run_app(app, *, host='0.0.0.0', port=None, shutdown_timeout=60.0, ssl_context=None, print=print, backlog=128):
"""Run an app"""
if port is None:
if not ssl_context:
port = 8080
else:
port = 8443
loop = app.loop
handler = app.make_handler()
server = loop.create_server(handler, host, port, ssl=ssl_context, backlog=backlog)
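# Run the server coroutine and the app's startup hooks together on the bot's event loop.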
srv, startup_res = loop.run_until_complete(asyncio.gather(server, app.startup(), loop=loop))
scheme = 'https' if ssl_context else 'http'
print("======== Running on {scheme}://{host}:{port}/ ========\n"
"(Press CTRL+C to quit)".format(
scheme=scheme, host=host, port=port))
async def tba_handler(request):
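# Ko-Fi webhooks POST form data whose 'data' field is a JSON payload (donor name, amount, message).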
data = await request.post()
data = json.loads(data['data'])
print("Accepted request:\n{}".format(data))
print("{}".format(data))
embed = discord.Embed(
title="Crooq's Computer Quest Updated!",
url="https://ko-fi.com/eylesis",
description="{} has given ${} to the cause! The donation is appreciated!".format(data['from_name'], data['amount']))
embed.set_footer(text="Ko-Fi Notification")
if data['message'] == "":
data['message'] = "No Message."
embed.add_field(name="__Message__", value=data['message'])
channelids = {'470455397912674305', '391157967493267457'}
for channelid in channelids:
await bot.send_message(bot.get_channel(channelid), embed=embed)
return web.Response()
bot = commands.Bot(command_prefix='*')
loop = bot.loop
app = web.Application(loop=loop)
app.router.add_post('/endpoint', tba_handler)
if __name__ == "__main__":
run_app(app, host=os.environ.get('HOST'), port=os.environ.get('PORT'))
bot.run(botToken)
|
Python
| 0
|
@@ -1137,36 +1137,20 @@
le=%22
-Crooq's Computer Quest Updat
+Ko-Fi Receiv
ed!%22
@@ -1226,60 +1226,17 @@
has
-giv
+s
en
+t
$%7B%7D
- to the cause! The donation is appreciated!
+.
%22.fo
@@ -1506,30 +1506,8 @@
305'
-, '391157967493267457'
%7D%0A
|
b980d69fe3d2da87814a915c6a85ef930d832860
|
Change simple_blend to simply average the predictions
|
scripts/simple_blend.py
|
scripts/simple_blend.py
|
import numpy as np
import os
import sys
sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
from utils.data_paths import SUBMISSIONS_DIR_PATH
OUTPUT_FILE_PATH = os.path.join(SUBMISSIONS_DIR_PATH, 'simple_blend.dta')
PREDICTION_FILE_PATHS = [os.path.join(SUBMISSIONS_DIR_PATH, 'predictions1.dta'),
os.path.join(SUBMISSIONS_DIR_PATH, 'predictions2.dta')]
PREDICTION_COEFFICIENTS = [0.4,
0.6]
def main():
predictions = get_predictions()
write(predictions)
def get_predictions():
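# Read each prediction file as a column vector and stack the columns into an (n_predictions, n_files) matrix.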
predictions = np.array([])
for i, prediction_file_path in enumerate(PREDICTION_FILE_PATHS):
with open(prediction_file_path, 'r') as prediction_file:
prediction = np.transpose(np.array([prediction_file.read().split()],
dtype=np.float32))
if predictions.size == 0:
predictions = prediction
else:
predictions = np.append(predictions, prediction, axis=1)
return np.matrix(predictions)
def write(predictions):
coefficients = np.array(PREDICTION_COEFFICIENTS)
with open(OUTPUT_FILE_PATH, 'w+') as output_file:
for prediction_set in predictions:
prediction = np.dot(np.ravel(prediction_set), coefficients)
output_file.write('{}\n'.format(prediction))
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -403,72 +403,8 @@
')%5D%0A
-PREDICTION_COEFFICIENTS = %5B0.4,%0A 0.6%5D%0A
%0A%0Ade
@@ -1043,61 +1043,8 @@
s):%0A
- coefficients = np.array(PREDICTION_COEFFICIENTS)%0A
@@ -1168,11 +1168,15 @@
np.
-dot
+average
(np.
@@ -1200,22 +1200,8 @@
set)
-, coefficients
)%0A
|
4657a4fafb1218fe73b76d142c554bd8f347d81f
|
Make the correct None check
|
regserver/regulations/views/chrome.py
|
regserver/regulations/views/chrome.py
|
from django.conf import settings
from django.http import Http404
from django.views.generic.base import TemplateView
from regulations.generator import generator
from regulations.generator.versions import fetch_grouped_history
from regulations.views import utils
from regulations.views.partial import *
from regulations.views.sidebar import SideBarView
class ChromeView(TemplateView):
""" Base class for views which wish to include chrome. """
template_name = 'chrome.html'
def add_extras(self, context):
context['env'] = 'source' if settings.DEBUG else 'built'
context['GOOGLE_ANALYTICS_SITE'] = settings.GOOGLE_ANALYTICS_SITE
context['GOOGLE_ANALYTICS_ID'] = settings.GOOGLE_ANALYTICS_ID
return context
def get_context_data(self, **kwargs):
context = super(ChromeView, self).get_context_data(**kwargs)
label_id = context['label_id']
version = context['version']
# Hack solution: pull in full regulation, then the partial
# @todo: just query the meta and toc layers
part = label_id.split('-')[0]
full_tree = generator.get_regulation(part, version)
relevant_tree = generator.get_tree_paragraph(label_id, version)
if not full_tree or relevant_tree:
raise Http404
partial_view = self.partial_class.as_view()
response = partial_view(
self.request, label_id=label_id, version=version)
response.render()
context['partial_content'] = response.content
sidebar_view = SideBarView.as_view()
response = sidebar_view(self.request, label_id=label_id,
version=version)
response.render()
context['sidebar_content'] = response.content
appliers = utils.handle_specified_layers(
'toc,meta', part, version, self.partial_class.sectional_links)
builder = generate_html(full_tree, appliers)
context['tree'] = full_tree
self.add_extras(context)
context['part'] = part
context['history'] = fetch_grouped_history(part)
return context
class ChromeInterpView(ChromeView):
"""Interpretation of regtext section/paragraph or appendix with chrome"""
partial_class = PartialInterpView
class ChromeSectionView(ChromeView):
"""Regtext section with chrome"""
partial_class = PartialSectionView
class ChromeParagraphView(ChromeView):
"""Regtext paragraph with chrome"""
partial_class = PartialParagraphView
class ChromeRegulationView(ChromeView):
"""Entire regulation with chrome"""
partial_class = PartialRegulationView
|
Python
| 0.999589
|
@@ -1243,12 +1243,8 @@
if
-not
full
@@ -1249,16 +1249,24 @@
ll_tree
+is None
or relev
@@ -1273,16 +1273,24 @@
ant_tree
+ is None
:%0A
|
e09d1f7f3b078b6de0f87b05939d776bc43ee483
|
Comment about hack
|
pax/pax.py
|
pax/pax.py
|
import logging
import inspect
import pprint
import configparser
from pax import units
import os
from pluginbase import PluginBase
def EvaluateConfiguration(config):
evaled_config = {}
for key, value in config.items():
evaled_config[key] = eval(value, {
name : getattr(units, name)
for name in dir(units)
})
return evaled_config
def Instantiate(name, plugin_source, config_values):
"""take class name and build class from it"""
name_module, name_class = name.split('.')
plugin_module = plugin_source.load_plugin(name_module)
if config_values.has_section(name):
this_config = config_values[name]
else:
this_config = config_values['DEFAULT']
this_config = EvaluateConfiguration(this_config)
return getattr(plugin_module, name_class)(this_config)
def Processor(input, transform, output):
# Check input types
# TODO (tunnell): Don't use asserts, but raise ValueError() with
# informative error
assert isinstance(input, str)
assert isinstance(transform, (str, list))
assert isinstance(output, (str, list))
# If 'transform' or 'output' aren't lists, turn them into lists
if not isinstance(transform, list):
transform = [transform]
if not isinstance(output, list):
output = [output]
# What we do on data...
actions = transform + output
# Find location of this file
absolute_path = os.path.abspath(inspect.getfile(inspect.currentframe()))
dir = os.path.dirname(absolute_path)
interpolation = configparser.ExtendedInterpolation()
config = configparser.ConfigParser(interpolation=interpolation,
inline_comment_prefixes='#',
strict=True)
# Allow for case-sensitive configuration keys
config.optionxform = str
# Load the default configuration
config.read(os.path.join(dir, 'default.ini'))
default_config = EvaluateConfiguration(config['DEFAULT'])
# Setup logging
string_level = default_config['loglevel']
numeric_level = getattr(logging, string_level.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % string_level)
FORMAT = '%(asctime)-15s %(name)s L%(lineno)s - %(levelname)s %(message)s'
logging.basicConfig(level=numeric_level, format=FORMAT)
log = logging.getLogger('Processor')
# Print settings to log
log.debug(pprint.pformat(config, compact=True))
# Setup plugins (which involves finding the plugin directory).
plugin_base = PluginBase(package='pax.plugins')
searchpath = ['./plugins'] + config['DEFAULT']['plugin_paths'].split()
# Find the absolute path, then director, then find plugin directory
searchpath += [os.path.join(dir, '..', 'plugins')]
log.debug("Search path for plugins is %s" % str(searchpath))
plugin_source = plugin_base.make_plugin_source(searchpath=searchpath)
# Instantiate requested plugins
input = Instantiate(input, plugin_source, config)
actions = [Instantiate(x, plugin_source, config) for x in actions]
# This is the *actual* event loop
for i, event in enumerate(input.GetEvents()):
log.info("Event %d" % i)
for j, block in enumerate(actions):
log.debug("Step %d with %s", j, block.__class__.__name__)
event = block.ProcessEvent(event)
|
Python
| 0
|
@@ -245,16 +245,76 @@
tems():%0A
+ #Eval value with globals = everything from units...%0A
|
f5b3d351a52d32b5485bf1a0766d8eb2ab213eea
|
Update views.py
|
app/auth/views.py
|
app/auth/views.py
|
from flask import render_template, \
redirect, request, url_for, flash
from flask.ext.login import login_user, \
logout_user, login_required, current_user
from . import auth
from app import db
from ..models import User, Notebook
from ..email import send_email
from .forms import LoginForm, RegistrationForm, \
ChangePasswordForm, PasswordResetRequestForm, \
PasswordResetForm, ChangeEmailForm
@auth.before_app_request
def before_request():
if current_user.is_authenticated() \
and not current_user.confirmed \
and request.endpoint[:5] != 'auth.' \
and request.endpoint != 'static':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data.lower().strip()).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(
email=form.email.data.lower().strip(),
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
default_notebook = Notebook(
title='Default', author_id=user.id)
db.session.add(default_notebook)
db.session.commit()
token = user.generate_confirmation_token()
send_email(
user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent.')
return redirect(url_for('main.index'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(
current_user.email, 'Confirm Your Account',
'/auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent.')
return redirect(url_for('main.index'))
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous() or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
db.session.commit()
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data.lower().strip()).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data.lower().strip()).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data.lower().strip()
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change-email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
|
Python
| 0
|
@@ -2571,32 +2571,16 @@
oken%3E')%0A
-@login_required%0A
def conf
|
2779fdd14279e017f35780fe343b1c7243898397
|
Fix extension description and remove unused exception
|
neutron/extensions/l3_ext_gw_mode.py
|
neutron/extensions/l3_ext_gw_mode.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira Networks, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Salvatore Orlando, Nicira, Inc
#
from neutron.api import extensions
from neutron.api.v2 import attributes as attrs
from neutron.common import exceptions as qexception
from neutron.extensions import l3
class RouterDNatDisabled(qexception.BadRequest):
message = _("DNat is disabled for the router %(router_id)s. Floating IPs "
"cannot be associated.")
EXTENDED_ATTRIBUTES_2_0 = {
'routers': {l3.EXTERNAL_GW_INFO:
{'allow_post': True,
'allow_put': True,
'is_visible': True,
'default': None,
'enforce_policy': True,
'validate':
{'type:dict_or_nodata':
{'network_id': {'type:uuid': None, 'required': True},
'enable_snat': {'type:boolean': None, 'required': False,
'convert_to': attrs.convert_to_boolean}}
}}}}
class L3_ext_gw_mode(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
return "Neutron L3 Configurable external gateway mode"
@classmethod
def get_alias(cls):
return "ext-gw-mode"
@classmethod
def get_description(cls):
return ("Extension of the router abstraction for specifying whether "
"SNAT, DNAT or both should occur on the external gateway")
@classmethod
def get_namespace(cls):
return "http://docs.openstack.org/ext/neutron/ext-gw-mode/api/v1.0"
@classmethod
def get_updated(cls):
return "2013-03-28T10:00:00-00:00"
def get_required_extensions(self):
return ["router"]
def get_extended_resources(self, version):
if version == "2.0":
return dict(EXTENDED_ATTRIBUTES_2_0.items())
else:
return {}
|
Python
| 0
|
@@ -820,251 +820,29 @@
ron.
-common import exceptions as qexception%0Afrom neutron.extensions import l3%0A%0A%0Aclass RouterDNatDisabled(qexception.BadRequest):%0A message = _(%22DNat is disabled for the router %25(router_id)s. Floating IPs %22%0A %22cannot be associated.%22)
+extensions import l3%0A
%0A%0AEX
@@ -1788,22 +1788,8 @@
SNAT
-, DNAT or both
sho
|
e953457bf21713931182a976886cbe31405e1de1
|
Fix payment transactions target field
|
payment.py
|
payment.py
|
# -*- coding: utf-8 -*-
"""
payment
:copyright: (c) 2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from decimal import Decimal
from trytond.model import ModelSQL, ModelView, fields
from trytond.pool import PoolMeta, Pool
from trytond.pyson import Eval, Not
__all__ = ['Payment']
__metaclass__ = PoolMeta
class Payment(ModelSQL, ModelView):
'Payment'
__name__ = 'sale.payment'
sequence = fields.Integer('Sequence', required=True, select=True)
party = fields.Function(
fields.Many2One('party.party', 'Party'),
getter='on_change_with_party'
)
amount = fields.Numeric(
'Amount', required=True, digits=(16, Eval('currency_digits', 2)),
depends=['currency_digits'],
)
sale = fields.Many2One(
'sale.sale', 'Sale', required=True, select=True, ondelete='CASCADE'
)
gateway = fields.Many2One(
'payment_gateway.gateway', 'Gateway', required=True,
ondelete='RESTRICT', select=True,
)
payment_transactions = fields.One2Many(
'payment_gateway.transaction', 'payment', 'Payment Transactions',
readonly=True
)
currency_digits = fields.Function(
fields.Integer('Currency Digits'),
getter='on_change_with_currency_digits'
)
amount_consumed = fields.Function(
fields.Numeric(
'Amount Consumed', digits=(16, Eval('currency_digits', 2)),
depends=['currency_digits'],
), 'get_amount'
)
amount_available = fields.Function(
fields.Numeric(
'Amount Remaining', digits=(16, Eval('currency_digits', 2)),
depends=['currency_digits'],
), 'get_amount'
)
amount_authorized = fields.Function(
fields.Numeric(
'Amount Authorized', digits=(16, Eval('currency_digits', 2)),
depends=['currency_digits'],
), 'get_amount'
)
amount_captured = fields.Function(
fields.Numeric(
'Amount Captured', digits=(16, Eval('currency_digits', 2)),
depends=['currency_digits'],
), 'get_amount'
)
payment_profile = fields.Many2One(
'party.payment_profile', 'Payment Profile',
domain=[
('party', '=', Eval('party')),
('gateway', '=', Eval('gateway')),
],
states={
'required': Eval('method') == 'credit_card'
},
ondelete='RESTRICT', depends=['party', 'gateway'],
)
method = fields.Function(fields.Char('Payment Method'), 'get_method')
provider = fields.Function(fields.Char('Payment Provider'), 'get_provider')
reference = fields.Char(
'Reference', states={
'invisible': Not(Eval('method') == 'manual'),
}
)
def get_rec_name(self, name):
if self.payment_profile:
return "%s - %s - %s" % (
self.gateway, self.payment_profile, self.amount
)
return "%s - %s" % (self.gateway, self.amount)
def get_provider(self, name=None):
"""
Return the gateway provider based on the gateway
"""
return self.gateway and self.gateway.provider or None
def get_method(self, name=None):
"""
Return the method based on the gateway
"""
return self.gateway and self.gateway.method or None
@classmethod
def __setup__(cls):
super(Payment, cls).__setup__()
cls._error_messages.update({
'cannot_delete_payment':
"Payment cannot be deleted as placeholder for amount consumed "
"has already been consumed.",
})
@fields.depends('sale')
def on_change_with_party(self, name=None):
return self.sale and self.sale.party.id or None
@fields.depends('sale')
def on_change_with_currency_digits(self, name=None):
if self.sale.currency:
return self.sale.currency.digits
return 2
def get_amount(self, name):
"""Getter method for fetching amounts.
"""
PaymentTransaction = Pool().get('payment_gateway.transaction')
payment_transactions = PaymentTransaction.search([
('sale_payment', '=', self.id)
])
sum_transactions = lambda txns: sum((txn.amount for txn in txns))
if name == "amount_consumed":
return sum_transactions(filter(
lambda t: t.state in ('authorized', 'completed', 'posted'),
payment_transactions
))
elif name == "amount_authorized":
return sum_transactions(filter(
lambda t: t.state == 'authorized',
payment_transactions
))
elif name == "amount_captured":
return sum_transactions(filter(
lambda t: t.state in ('completed', 'posted'),
payment_transactions
))
elif name == "amount_available":
return max(self.amount - self.amount_consumed, Decimal('0'))
@staticmethod
def default_sequence():
return 10
@classmethod
def cancel(cls, payments):
"""
Cancel all payment transactions related to payment
"""
PaymentTransaction = Pool().get('payment_gateway.transaction')
payment_transactions = []
for payment in payments:
payment_transactions.extend(payment.payment_transactions)
PaymentTransaction.cancel(payment_transactions)
def _create_payment_transaction(self, amount, description):
"""Creates an active record for gateway transaction.
"""
PaymentTransaction = Pool().get('payment_gateway.transaction')
Date = Pool().get('ir.date')
return PaymentTransaction.create([{
'description': description or 'Auto charge from sale',
'date': Date.today(),
'party': self.sale.party,
'payment_profile': self.payment_profile,
'address': (
self.payment_profile and
self.payment_profile.address or self.sale.invoice_address),
'amount': self.sale.currency.round(amount),
'currency': self.sale.currency,
'gateway': self.gateway,
'sale_payment': self.id,
'provider_reference': self.reference,
}])[0]
def authorize(self, amount, description):
"""
Authorize the given amount from this transaction
"""
PaymentTransaction = Pool().get('payment_gateway.transaction')
transaction = self._create_payment_transaction(amount, description)
PaymentTransaction.authorize([transaction])
return transaction
def capture(self, amount, description):
"""
Captures the given amount from this transaction
"""
PaymentTransaction = Pool().get('payment_gateway.transaction')
transaction = self._create_payment_transaction(amount, description)
PaymentTransaction.capture([transaction])
return transaction
@classmethod
def delete(cls, payments):
"""
Delete a payment only if there is no amount consumed
"""
for payment in payments:
if payment.amount_consumed:
cls.raise_user_error("cannot_delete_payment")
super(Payment, cls).delete(payments)
|
Python
| 0.000285
|
@@ -1124,16 +1124,21 @@
tion', '
+sale_
payment'
|
382507ea22b57b042eefe0fac4a9e333e797a55d
|
Remove comment and the single equals sign that pinned the version
|
scripts/update_teams.py
|
scripts/update_teams.py
|
#!/usr/bin/env conda-execute
# conda execute
# env:
# - python
# - conda-smithy
# - pygithub 1.*
# - six
# This needs to be pinned because selectors are an issue in 1.20.2
# - conda-build =1.20.1
# channels:
# - conda-forge
# run_with: python
import argparse
import collections
import os
import six
from github import Github
import github
import yaml
from conda_build.metadata import MetaData
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('feedstocks_clone', help="The location of the feedstocks directory within the conda-forge/feedstocks clone.")
args = parser.parse_args()
from conda_smithy.github import gh_token
token = gh_token()
gh = Github(token)
conda_forge = gh.get_organization('conda-forge')
teams = {team.name: team for team in conda_forge.get_teams()}
feedstocks_path = args.feedstocks_clone
def create_team(org, name, description, repos):
# PyGithub creates secret teams, and has no way of turning that off! :(
post_parameters = {
"name": name,
"description": description,
"privacy": "closed",
}
post_parameters["repo_names"] = [element._identity for element in repos]
headers, data = org._requester.requestJsonAndCheck(
"POST",
org.url + "/teams",
input=post_parameters
)
return github.Team.Team(org._requester, headers, data, completed=True)
packages_visited = set()
all_members = set()
from random import choice
superlative = ['awesome', 'slick', 'formidable', 'awe-inspiring', 'breathtaking',
'magnificent', 'wonderous', 'stunning', 'astonishing', 'superb',
'splendid', 'impressive', 'unbeatable', 'excellent', 'top', 'outstanding',
'exalted', 'standout', 'smashing']
# Go through each of the feedstocks and ensure that the team is up to date and that
# there is nobody in the team who doesn't belong (i.e. isn't in the maintainers list).
for package_name in os.listdir(feedstocks_path):
packages_visited.add(package_name)
feedstock = os.path.join(feedstocks_path, package_name)
recipe = os.path.join(feedstock, 'recipe', 'meta.yaml')
if not os.path.exists(recipe):
print("The {} feedstock is recipe less".format(package_name))
continue
meta = MetaData(os.path.dirname(recipe))
contributors = meta.meta.get('extra', {}).get('recipe-maintainers', [])
if not isinstance(contributors, list):
# Deal with a contribution list which has dashes but no spaces
# (e.g. https://github.com/conda-forge/pandoc-feedstock/issues/1)
contributors = [contributors.lstrip('-')]
contributors = set(handle.lower() for handle in contributors)
all_members.update(contributors)
# Get the github repo for this feedstock.
repo = conda_forge.get_repo('{}-feedstock'.format(package_name))
# If the team already exists, get hold of it.
team = teams.get(package_name)
if not team:
team = create_team(conda_forge, package_name.lower(),
'The {} {} contributors!'.format(choice(superlative), package_name),
[repo])
teams[package_name] = team
# Ensure that we have merge rights on the repo for this team.
url = team.url + "/repos/" + repo._identity
team._requester.requestJsonAndCheck("PUT", url, input={"permission": "push"})
current_members = team.get_members()
member_handles = set([member.login.lower() for member in current_members])
for new_member in contributors - member_handles:
headers, data = team._requester.requestJsonAndCheck(
"PUT",
team.url + "/memberships/" + new_member)
for old_member in member_handles - contributors:
print("AN OLD MEMBER ({}) NEEDS TO BE REMOVED FROM {}".format(old_member, repo._identity))
# The following works, it is just a bit scary!
# headers, data = team._requester.requestJsonAndCheck(
# "DELETE",
# team.url + "/memberships/" + old_member)
# Create and administer the all-members team.
team = teams.get('all-members')
if not team:
team = create_team(conda_forge, 'all-members',
'All of the awesome conda-forge contributors!', [])
current_members = team.get_members()
member_handles = set([member.login.lower() for member in current_members])
for new_member in all_members - member_handles:
headers, data = team._requester.requestJsonAndCheck(
"PUT",
team.url + "/memberships/" + new_member)
for old_member in member_handles - all_members:
print("AN OLD MEMBER ({}) NEEDS TO BE REMOVED FROM all-members".format(old_member))
# Remove any teams which don't belong any more (because there is no longer a feedstock).
for team_to_remove in set(teams.keys()) - set(packages_visited):
if team_to_remove in ['Core',
'conda-forge.github.io',
'all-members',
'conda-forge-webservices']:
print('Keeping ', team_to_remove)
continue
print("THE {} TEAM NEEDS TO BE REMOVED.".format(team_to_remove))
# The following works, it is just a bit scary!
# teams[team_to_remove].delete()
|
Python
| 0
|
@@ -107,75 +107,8 @@
six%0A
-# This needs to be pinned because selectors are an issue in 1.20.2%0A
# -
@@ -120,17 +120,16 @@
a-build
-=
1.20.1%0A#
|
f78672957aa1458f0d1307685f6f1c30ad2db2a2
|
Change vlan range
|
neutron_ci/ci/tests/test_ml2_ucsm.py
|
neutron_ci/ci/tests/test_ml2_ucsm.py
|
# Copyright 2014 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Nikolay Fedotov, Cisco Systems, Inc.
import socket
import os
from ci import PARENT_FOLDER_PATH, \
NEXUS_VLAN_START, NEXUS_VLAN_END, \
NEXUS_INTF_NUM, NEXUS_IP, NEXUS_USER, NEXUS_PASSWORD
from ci.lib.test_case import BaseTestCase
TEST_LIST_FILE = os.path.join(PARENT_FOLDER_PATH, 'cisco_plugin_tests.txt')
Q_PLUGIN_EXTRA_CONF_PATH = \
'/opt/stack/networking-cisco/etc/neutron/plugins/ml2'
Q_PLUGIN_EXTRA_CONF_FILES = 'ml2_conf_cisco.ini'
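# DevStack local.conf template; the {placeholders} are filled in via str.format in setUpClass below.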
LOCAL_CONF = '''
[[local|localrc]]
NEUTRON_REPO={neutron_repo}
NEUTRON_BRANCH={neutron_branch}
MYSQL_PASSWORD=nova
RABBIT_PASSWORD=nova
SERVICE_TOKEN=nova
SERVICE_PASSWORD=nova
ADMIN_PASSWORD=nova
ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cpu,n-cond,n-sch,n-novnc,n-xvnc,n-cauth,rabbit
enable_service mysql
disable_service n-net
enable_service q-svc
enable_service q-agt
enable_service q-l3
enable_service q-dhcp
enable_service q-meta
enable_service q-lbaas
enable_service neutron
enable_service tempest
enable_plugin networking-cisco {net_cisco_repo} {net_cisco_ref}
enable_service net-cisco
LIBVIRT_TYPE=qemu
NOVA_USE_QUANTUM_API=v2
VOLUME_BACKING_FILE_SIZE=2052M
Q_PLUGIN=ml2
Q_ML2_PLUGIN_MECHANISM_DRIVERS=openvswitch,cisco_ucsm
Q_ML2_PLUGIN_TYPE_DRIVERS=vlan
ENABLE_TENANT_TUNNELS=False
Q_ML2_TENANT_NETWORK_TYPE=local
Q_PLUGIN_EXTRA_CONF_PATH=({Q_PLUGIN_EXTRA_CONF_PATH})
Q_PLUGIN_EXTRA_CONF_FILES=({Q_PLUGIN_EXTRA_CONF_FILES})
ML2_VLAN_RANGES=physnet1:{vlan_start}:{vlan_end}
PHYSICAL_NETWORK=physnet1
OVS_PHYSICAL_BRIDGE=br-eth1
TENANT_VLAN_RANGE={vlan_start}:{vlan_end}
ENABLE_TENANT_VLANS=True
API_RATE_LIMIT=False
VERBOSE=True
DEBUG=True
LOGFILE=/opt/stack/screen-logs/stack.sh.log
USE_SCREEN=True
SCREEN_LOGDIR=/opt/stack/screen-logs
RECLONE=True
[[post-config|{Q_PLUGIN_EXTRA_CONF_PATH}/{Q_PLUGIN_EXTRA_CONF_FILES}]]
[ml2_cisco_ucsm]
ucsm_ip=172.21.11.10
ucsm_username=admin
ucsm_password=Cisc0123
'''
class ML2UCSMTest(BaseTestCase):
neutron_repo = os.environ.get('NEUTRON_REPO')
neutron_ref = os.environ.get('NEUTRON_REF')
net_cisco_repo = os.environ.get('NET_CISCO_REPO')
net_cisco_ref = os.environ.get('NET_CISCO_REF')
@classmethod
def setUpClass(cls):
BaseTestCase.setUpClass()
local_conf = LOCAL_CONF.format(
neutron_repo=cls.neutron_repo,
neutron_branch=cls.neutron_ref,
net_cisco_repo=cls.net_cisco_repo,
net_cisco_ref=cls.net_cisco_ref,
Q_PLUGIN_EXTRA_CONF_PATH=Q_PLUGIN_EXTRA_CONF_PATH,
Q_PLUGIN_EXTRA_CONF_FILES=Q_PLUGIN_EXTRA_CONF_FILES,
vlan_start=NEXUS_VLAN_START, vlan_end=NEXUS_VLAN_END,
host=socket.gethostname(), port=NEXUS_INTF_NUM,
router_ip=NEXUS_IP,
username=NEXUS_USER,
password=NEXUS_PASSWORD)
cls.devstack.local_conf = local_conf
cls.devstack.clone()
def test_tempest(self):
self.assertFalse(self.devstack.stack())
self.assertFalse(self.devstack.run_tempest(TEST_LIST_FILE))
|
Python
| 0.000001
|
@@ -659,22 +659,8 @@
c.%0A%0A
-import socket%0A
impo
@@ -702,108 +702,8 @@
PATH
-, %5C%0A NEXUS_VLAN_START, NEXUS_VLAN_END, %5C%0A NEXUS_INTF_NUM, NEXUS_IP, NEXUS_USER, NEXUS_PASSWORD
%0Afro
@@ -1050,16 +1050,17 @@
branch%7D%0A
+%0A
MYSQL_PA
@@ -1471,16 +1471,17 @@
tempest%0A
+%0A
enable_p
@@ -1561,16 +1561,17 @@
t-cisco%0A
+%0A
LIBVIRT_
@@ -1928,39 +1928,23 @@
hysnet1:
-%7Bvlan_start%7D:%7Bvlan_end%7D
+100:200
%0APHYSICA
@@ -2012,31 +2012,15 @@
NGE=
-%7Bvlan_start%7D:%7Bvlan_end%7D
+100:200
%0AENA
@@ -2196,16 +2196,17 @@
NE=True%0A
+%0A
%5B%5Bpost-c
@@ -2954,16 +2954,16 @@
F_PATH,%0A
+
@@ -3021,236 +3021,8 @@
ILES
-,%0A vlan_start=NEXUS_VLAN_START, vlan_end=NEXUS_VLAN_END,%0A host=socket.gethostname(), port=NEXUS_INTF_NUM,%0A router_ip=NEXUS_IP,%0A username=NEXUS_USER,%0A password=NEXUS_PASSWORD
)%0A%0A
|
aaa6142718827ea6d568eccc75c624598b0bc9c9
|
Update __init__.py
|
pymeasure/instruments/thorlabs/__init__.py
|
pymeasure/instruments/thorlabs/__init__.py
|
#
# This file is part of the PyMeasure package.
#
# Copyright (c) 2013-2020 PyMeasure Developers
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from .thorlabspm100usb import ThorlabsPM100USB
|
Python
| 0.000072
|
@@ -1197,8 +1197,53 @@
M100USB%0A
+from .thorlabspro8000 import thorlabsPro8000%0A
|
d79a914cc093b4e1bb8fb87d8d85d5657097f37f
|
Remove explicit puppet version from kickstart metadata
|
nailgun/nailgun/orchestrator/provisioning_serializers.py
|
nailgun/nailgun/orchestrator/provisioning_serializers.py
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Provisioning serializers for orchestrator"""
from nailgun.logger import logger
from nailgun.network.manager import NetworkManager
from nailgun.settings import settings
from nailgun.task.helpers import TaskHelper
class ProvisioningSerializer(object):
"""Provisioning serializer"""
@classmethod
def serialize(cls, cluster, nodes):
"""Serialize cluster for provisioning."""
cluster_attrs = cluster.attributes.merged_attrs_values()
serialized_nodes = cls.serialize_nodes(cluster_attrs, nodes)
return {
'engine': {
'url': settings.COBBLER_URL,
'username': settings.COBBLER_USER,
'password': settings.COBBLER_PASSWORD},
'nodes': serialized_nodes}
@classmethod
def serialize_nodes(cls, cluster_attrs, nodes):
"""Serialize nodes."""
serialized_nodes = []
for node in nodes:
serialized_node = cls.serialize_node(cluster_attrs, node)
serialized_nodes.append(serialized_node)
return serialized_nodes
@classmethod
def serialize_node(cls, cluster_attrs, node):
"""Serialize a single node."""
serialized_node = {
'uid': node.uid,
'power_address': node.ip,
'name': TaskHelper.make_slave_name(node.id),
'hostname': node.fqdn,
'power_pass': cls.get_ssh_key_path(node),
'profile': cluster_attrs['cobbler']['profile'],
'power_type': 'ssh',
'power_user': 'root',
'name_servers': '\"%s\"' % settings.DNS_SERVERS,
'name_servers_search': '\"%s\"' % settings.DNS_SEARCH,
'netboot_enabled': '1',
'kernel_options': {
'netcfg/choose_interface': node.admin_interface.name,
'udevrules': cls.interfaces_mapping_for_udev(node)},
'ks_meta': {
'ks_spaces': node.attributes.volumes,
'puppet_auto_setup': 1,
'puppet_master': settings.PUPPET_MASTER_HOST,
'puppet_version': settings.PUPPET_VERSION,
'puppet_enable': 0,
'mco_auto_setup': 1,
'install_log_2_syslog': 1,
'mco_pskey': settings.MCO_PSKEY,
'mco_vhost': settings.MCO_VHOST,
'mco_host': settings.MCO_HOST,
'mco_user': settings.MCO_USER,
'mco_password': settings.MCO_PASSWORD,
'mco_connector': settings.MCO_CONNECTOR,
'mco_enable': 1,
'auth_key': "\"%s\"" % cluster_attrs.get('auth_key', '')}}
serialized_node.update(cls.serialize_interfaces(node))
return serialized_node
@classmethod
def serialize_interfaces(cls, node):
interfaces = {}
interfaces_extra = {}
net_manager = NetworkManager
admin_ips = net_manager.get_admin_ips_for_interfaces(node)
admin_netmask = net_manager.get_admin_network_group().netmask
for interface in node.interfaces:
name = interface.name
interfaces[name] = {
'mac_address': interface.mac,
'static': '0',
'netmask': admin_netmask,
'ip_address': admin_ips[name]}
# interfaces_extra field in cobbler ks_meta
# means some extra data for network interfaces
# configuration. It is used by cobbler snippet.
# For example, cobbler interface model does not
# have 'peerdns' field, but we need this field
# to be configured. So we use interfaces_extra
# branch in order to set this unsupported field.
interfaces_extra[name] = {
'peerdns': 'no',
'onboot': 'no'}
# We want node to be able to PXE boot via any of its
# interfaces. That is why we add all discovered
# interfaces into cobbler system. But we want
# assignted fqdn to be resolved into one IP address
# because we don't completely support multiinterface
# configuration yet.
if interface.mac == node.mac:
interfaces[name]['dns_name'] = node.fqdn
interfaces_extra[name]['onboot'] = 'yes'
return {
'interfaces': interfaces,
'interfaces_extra': interfaces_extra}
@classmethod
def interfaces_mapping_for_udev(cls, node):
"""Serialize interfaces mapping for cobbler
:param node: node model
:returns: returns string, example:
00:02:03:04:04_eth0,00:02:03:04:05_eth1
"""
return ','.join((
'{0}_{1}'.format(i.mac, i.name) for i in node.interfaces))
@classmethod
def get_ssh_key_path(cls, node):
"""Assign power pass depend on node state."""
if node.status == "discover":
logger.info(
u'Node %s seems booted with bootstrap image', node.full_name)
return settings.PATH_TO_BOOTSTRAP_SSH_KEY
logger.info(u'Node %s seems booted with real system', node.full_name)
return settings.PATH_TO_SSH_KEY
def serialize(cluster, nodes):
"""Serialize cluster for provisioning."""
TaskHelper.prepare_for_provisioning(nodes)
return ProvisioningSerializer.serialize(cluster, nodes)
|
Python
| 0.000143
|
@@ -2705,67 +2705,8 @@
ST,%0A
- 'puppet_version': settings.PUPPET_VERSION,%0A
|
1efaeb3e3d26848dc0338b9720ea7ba4e496a914
|
Fix test: use assertItemsEqual instead of assertEquals
|
nailgun/nailgun/test/test_cluster_collection_handlers.py
|
nailgun/nailgun/test/test_cluster_collection_handlers.py
|
# -*- coding: utf-8 -*-
import json
import unittest
from paste.fixture import TestApp
from mock import patch
from nailgun.api.models import Release, Network
from nailgun.test.base import BaseHandlers
from nailgun.test.base import reverse
class TestHandlers(BaseHandlers):
def test_cluster_list_empty(self):
resp = self.app.get(
reverse('ClusterCollectionHandler'),
headers=self.default_headers
)
self.assertEquals(200, resp.status)
response = json.loads(resp.body)
self.assertEquals([], response)
def test_cluster_create(self):
release_id = self.env.create_release(api=False).id
resp = self.app.post(
reverse('ClusterCollectionHandler'),
json.dumps({
'name': 'cluster-name',
'release': release_id,
}),
headers=self.default_headers
)
self.assertEquals(201, resp.status)
def test_if_cluster_creates_correct_networks(self):
release = Release()
release.version = "1.1.1"
release.name = u"release_name_" + str(release.version)
release.description = u"release_desc" + str(release.version)
release.networks_metadata = [
{"name": "floating", "access": "public"},
{"name": "fixed", "access": "private10"},
{"name": "storage", "access": "private192"},
{"name": "management", "access": "private172"},
{"name": "other_172", "access": "private172"},
]
release.attributes_metadata = {
"editable": {
"keystone": {
"admin_tenant": "admin"
}
},
"generated": {
"mysql": {
"root_password": ""
}
}
}
self.db.add(release)
self.db.commit()
resp = self.app.post(
reverse('ClusterCollectionHandler'),
json.dumps({
'name': 'cluster-name',
'release': release.id,
}),
headers=self.default_headers
)
self.assertEquals(201, resp.status)
nets = self.db.query(Network).all()
obtained = []
for net in nets:
obtained.append({
'release': net.release,
'name': net.name,
'access': net.access,
'vlan_id': net.vlan_id,
'cidr': net.cidr,
'gateway': net.gateway
})
expected = [
{
'release': release.id,
'name': u'floating',
'access': 'public',
'vlan_id': 100,
'cidr': '240.0.0.0/24',
'gateway': '240.0.0.1'
},
{
'release': release.id,
'name': u'fixed',
'access': 'private10',
'vlan_id': 101,
'cidr': '10.0.0.0/24',
'gateway': '10.0.0.1'
},
{
'release': release.id,
'name': u'storage',
'access': 'private192',
'vlan_id': 102,
'cidr': '192.168.0.0/24',
'gateway': '192.168.0.1'
},
{
'release': release.id,
'name': u'management',
'access': 'private172',
'vlan_id': 103,
'cidr': '172.16.0.0/24',
'gateway': '172.16.0.1'
},
{
'release': release.id,
'name': u'other_172',
'access': 'private172',
'vlan_id': 104,
'cidr': '172.16.1.0/24',
'gateway': '172.16.1.1'
},
]
self.assertEquals(expected, obtained)
def test_network_validation_on_cluster_creation(self):
cluster = self.env.create_cluster(api=True)
nets = self.env.generate_ui_networks(cluster["id"])
nets['networks'][-1]["network_size"] = 16
nets['networks'][-1]["amount"] = 3
resp = self.app.put(
reverse('NetworkConfigurationHandler',
kwargs={'cluster_id': cluster['id']}),
json.dumps(nets),
headers=self.default_headers,
expect_errors=True
)
self.assertEquals(400, resp.status)
@patch('nailgun.rpc.cast')
def test_verify_networks(self, mocked_rpc):
cluster = self.env.create_cluster(api=True)
resp = self.app.put(
reverse('NetworkConfigurationHandler',
kwargs={'cluster_id': cluster['id']}),
json.dumps(self.env.generate_ui_networks(cluster["id"])),
headers=self.default_headers
)
self.assertEquals(resp.status, 202)
|
Python
| 0.000159
|
@@ -3882,22 +3882,26 @@
f.assert
+Items
Equal
-s
(expecte
|
fa82a17c61698847904fa7dea14414b30b80bdfc
|
Update InferenceContext attribute documentation
|
astroid/context.py
|
astroid/context.py
|
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Various context related utilities, including inference and call contexts."""
import contextlib
import copy
import pprint
class InferenceContext(object):
"""Provide context for inference
Store already inferred nodes to save time
Account for already visited nodes to stop infinite recursion
"""
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred')
def __init__(self, path=None, inferred=None):
self.path = path or set()
"""Path of visited nodes and their lookupname
:type: set(tuple(NodeNG, optional(str)))"""
self.lookupname = None
self.callcontext = None
self.boundnode = None
self.inferred = inferred or {}
"""
:type: dict(seq, seq)
Inferred node contexts to their mapped results
Currently the key is (node, lookupname, callcontext, boundnode)
and the value is tuple of the inferred results
"""
def push(self, node):
"""Push node into inference path
:return: True if node is already in context path else False
:rtype: bool
Allows one to see if the given node has already
been looked at for this inference context"""
name = self.lookupname
if (node, name) in self.path:
return True
self.path.add((node, name))
return False
def clone(self):
"""Clone inference path
For example, each side of a binary operation (BinOp)
starts with the same context but diverge as each side is inferred
so the InferenceContext will need be cloned"""
# XXX copy lookupname/callcontext ?
clone = InferenceContext(copy.copy(self.path), inferred=self.inferred)
clone.callcontext = self.callcontext
clone.boundnode = self.boundnode
return clone
def cache_generator(self, key, generator):
"""Cache result of generator into dictionary
Used to cache inference results"""
results = []
for result in generator:
results.append(result)
yield result
self.inferred[key] = tuple(results)
return
@contextlib.contextmanager
def restore_path(self):
path = set(self.path)
yield
self.path = path
def __str__(self):
state = ('%s=%s' % (field, pprint.pformat(getattr(self, field),
width=80 - len(field)))
for field in self.__slots__)
return '%s(%s)' % (type(self).__name__, ',\n '.join(state))
class CallContext(object):
"""Holds information for a call site."""
__slots__ = ('args', 'keywords')
def __init__(self, args, keywords=None):
self.args = args
if keywords:
keywords = [(arg.arg, arg.value) for arg in keywords]
else:
keywords = []
self.keywords = keywords
def copy_context(context):
if context is not None:
return context.clone()
return InferenceContext()
|
Python
| 0
|
@@ -780,16 +780,75 @@
%22%22%22
+%0A :type: set(tuple(NodeNG, optional(str)))%0A%0A
Path of
@@ -882,24 +882,25 @@
kupname%0A
+%0A
:type: s
@@ -895,32 +895,130 @@
-:type: set(tuple(NodeNG,
+Currently this key is %60%60(node, context.lookupname)%60%60%0A %22%22%22%0A self.lookupname = None%0A %22%22%22%0A :type:
opt
@@ -1026,18 +1026,14 @@
onal
-(str)))%22%22%22
+%5Bstr%5D%0A
%0A
@@ -1037,38 +1037,165 @@
-self.
+The original name of the node%0A%0A e.g.%0A foo = 1%0A The inference of 'foo' is nodes.Const(1) but the
lookup
+
name
-= None
+is 'foo'%0A %22%22%22
%0A
@@ -1231,29 +1231,325 @@
-self.boundnode = None
+%22%22%22%0A :type: optional%5BCallContext%5D%0A%0A The call arguments and keywords for the given context%0A %22%22%22%0A self.boundnode = None%0A %22%22%22%0A :type: optional%5BNodeNG%5D%0A%0A The bound node of the given context%0A%0A e.g. the bound node of object.__new__(cls) is the object node%0A %22%22%22
%0A
@@ -1711,16 +1711,18 @@
key is
+%60%60
(node, l
@@ -1755,16 +1755,18 @@
undnode)
+%60%60
%0A
@@ -3604,32 +3604,188 @@
keywords=None):%0A
+ %22%22%22%0A :param List%5BNodeNG%5D args: Call positional arguments%0A :param Union%5BList%5Bnodes.Keyword%5D, None%5D keywords: Call keywords%0A %22%22%22%0A
self.arg
|
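Editor's note: the diff field in each of these records is a set of diff-match-patch hunks (@@ -start,len +start,len @@) whose bodies are URL-encoded: %0A is a newline, %22 a double quote, %60 a backtick, %5B/%5D square brackets, %7B/%7D braces, and %25 a literal percent sign. Hand-decoded sketches follow the less obvious patches as a readability aid; the exact indentation in these sketches is a best-effort assumption. Decoded, the patch above documents the InferenceContext attributes in place, for example:

        self.path = path or set()
        """
        :type: set(tuple(NodeNG, optional(str)))

        Path of visited nodes and their lookupname

        Currently this key is ``(node, context.lookupname)``
        """
        self.lookupname = None
        """
        :type: optional[str]

        The original name of the node

        e.g.
        foo = 1
        The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
        """

and it gives CallContext.__init__ a parameter docstring:

    def __init__(self, args, keywords=None):
        """
        :param List[NodeNG] args: Call positional arguments
        :param Union[List[nodes.Keyword], None] keywords: Call keywords
        """

(callcontext and boundnode receive similar :type: docstrings, and the cache-key sentence gains ``...`` markup.)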
cc963ca1a169506ee46c926fd7e7bc41f0b46780
|
fix import complete_social_login
|
rest_auth/registration/serializers.py
|
rest_auth/registration/serializers.py
|
from django.http import HttpRequest
from django.conf import settings
from rest_framework import serializers
from requests.exceptions import HTTPError
# Import is needed only if we are using social login, in which
# case the allauth.socialaccount app will be declared
try:
from allauth.socialaccount.helpers import complete_social_login
except ImportError:
raise ImportError('allauth.socialaccount needs to be installed.')
if 'allauth.socialaccount' not in settings.INSTALLED_APPS:
raise ImportError('allauth.socialaccount needs to be added to INSTALLED_APPS.')
class SocialLoginSerializer(serializers.Serializer):
access_token = serializers.CharField(required=False, allow_blank=True)
code = serializers.CharField(required=False, allow_blank=True)
def _get_request(self):
request = self.context.get('request')
if not isinstance(request, HttpRequest):
request = request._request
return request
def get_social_login(self, adapter, app, token, response):
"""
        :param adapter: allauth.socialaccount Adapter subclass. Usually OAuthAdapter or OAuth2Adapter
:param app: `allauth.socialaccount.SocialApp` instance
:param token: `allauth.socialaccount.SocialToken` instance
        :param response: Provider's response for OAuth1. Not used in the OAuth2 flow.
        :return: A populated instance of `allauth.socialaccount.SocialLoginView`
"""
request = self._get_request()
social_login = adapter.complete_login(request, app, token, response=response)
social_login.token = token
return social_login
def validate(self, attrs):
view = self.context.get('view')
request = self._get_request()
if not view:
raise serializers.ValidationError(
'View is not defined, pass it as a context variable'
)
adapter_class = getattr(view, 'adapter_class', None)
if not adapter_class:
raise serializers.ValidationError('Define adapter_class in view')
adapter = adapter_class()
app = adapter.get_provider().get_app(request)
# More info on code vs access_token
# http://stackoverflow.com/questions/8666316/facebook-oauth-2-0-code-and-token
# Case 1: We received the access_token
        if 'access_token' in attrs:
access_token = attrs.get('access_token')
# Case 2: We received the authorization code
        elif 'code' in attrs:
self.callback_url = getattr(view, 'callback_url', None)
self.client_class = getattr(view, 'client_class', None)
if not self.callback_url:
raise serializers.ValidationError(
'Define callback_url in view'
)
if not self.client_class:
raise serializers.ValidationError(
'Define client_class in view'
)
code = attrs.get('code')
provider = adapter.get_provider()
scope = provider.get_scope(request)
client = self.client_class(
request,
app.client_id,
app.secret,
adapter.access_token_method,
adapter.access_token_url,
self.callback_url,
scope
)
token = client.get_access_token(code)
access_token = token['access_token']
else:
raise serializers.ValidationError('Incorrect input. access_token or code is required.')
token = adapter.parse_token({'access_token': access_token})
token.app = app
try:
login = self.get_social_login(adapter, app, token, access_token)
complete_social_login(request, login)
except HTTPError:
raise serializers.ValidationError('Incorrect value')
if not login.is_existing:
login.lookup()
login.save(request, connect=True)
attrs['user'] = login.account.user
return attrs
|
Python
| 0.000005
|
@@ -261,22 +261,13 @@
red%0A
-try:%0A from
+%0Aif '
alla
@@ -287,279 +287,156 @@
ount
-.helpers import complete_social_login%0Aexcept ImportError:%0A raise ImportError('allauth.socialaccount needs to be installed.')%0A%0Aif 'allauth.socialaccount' not in settings.INSTALLED_APPS:%0A raise ImportError('allauth.socialaccount needs to be added to INSTALLED_APPS.')
+' in settings.INSTALLED_APPS:%0A try:%0A from allauth.socialaccount.helpers import complete_social_login%0A except ImportError:%0A pass%0A
%0A%0A%0Ac
|
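Decoded, the patch replaces the hard ImportError raised at module import time with a soft guard, so the serializers module can be imported even when allauth is absent from INSTALLED_APPS:

if 'allauth.socialaccount' in settings.INSTALLED_APPS:
    try:
        from allauth.socialaccount.helpers import complete_social_login
    except ImportError:
        pass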
1a9581a33efab4bcf7f1b7a6e555fa373d6f0739
|
Fix repo URL in staging report
|
scripts/GenerateStagingReport.py
|
scripts/GenerateStagingReport.py
|
#coding=UTF-8
from BuildArchetypes import archetypes, getDeploymentContext
import argparse, cgi
parser = argparse.ArgumentParser(description="Build report generator")
parser.add_argument("version", type=str, help="Vaadin version that was just built")
parser.add_argument("deployUrl", type=str, help="Base url of the deployment server")
parser.add_argument("buildResultUrl", type=str, help="URL for the build result page")
parser.add_argument("stagingRepo", type=str, help="URL for the staging repository")
args = parser.parse_args()
content = """<html>
<head></head>
<body>
<table>
"""
content += "<tr><td>Try archetype demos<ul>"
for archetype in archetypes:
content += "<li><a href='{url}/{context}'>{demo}</a></li>\n".format(url=args.deployUrl, demo=archetype, context=getDeploymentContext(archetype, args.version))
content += """</ul></td></tr>
<tr><td><a href="{repoUrl}">Staging repository</a></td></tr>
<tr><td>Eclipse Ivy Settings:<br><pre>"""
content += cgi.escape(""" <ibiblio name="vaadin-staging" usepoms="true" m2compatible="true"
root="{repoUrl}" />""".format(repoUrl=args.stagingRepo))
content += """</pre>
</td></tr>
<tr><td><a href="https://dev.vaadin.com/milestone/Vaadin {version}">Trac Milestone</a></td></tr>
<tr><td><a href="https://dev.vaadin.com/admin/ticket/versions">Add version {version} to Trac</td></tr>
<tr><td><a href="{url}">Staging result page (See test results, pin and tag build and dependencies)</a></td></tr>
</table>
</body>
</html>""".format(url=args.buildResultUrl, repoUrl=args.stagingRepo, version=args.version)
f = open("result/report.html", 'w')
f.write(content)
|
Python
| 0.000032
|
@@ -952,16 +952,49 @@
%3Cpre%3E%22%22%22
+.format(repoUrl=args.stagingRepo)
%0Acontent
|
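Decoded, the fix is one appended call: the triple-quoted block containing the {repoUrl} placeholder was emitted without ever substituting it, and the patch attaches the missing .format() to its closing quotes:

content += """</ul></td></tr>
<tr><td><a href="{repoUrl}">Staging repository</a></td></tr>
<tr><td>Eclipse Ivy Settings:<br><pre>""".format(repoUrl=args.stagingRepo)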
ceb5f223f2f38969157372b608d03771a9179858
|
Make threading tests work in environments with restricted maxprocs
|
rootpy/logger/tests/test_threading.py
|
rootpy/logger/tests/test_threading.py
|
from __future__ import division
import itertools
import os
import threading
import time
from random import random
import rootpy; log = rootpy.log["rootpy.logger.test.threading"]
rootpy.logger.magic.DANGER.enabled = True
import ROOT
from .logcheck import EnsureLogContains
def optional_fatal(abort=True):
msg = "[rootpy.ALWAYSABORT]" if abort else "[rootpy.NEVERABORT]"
ROOT.Error("rootpy.logger.test", msg)
f = optional_fatal
optional_fatal._bytecode = lambda: map(ord, f.func_code.co_code)
optional_fatal._ORIG_BYTECODE = optional_fatal._bytecode()
optional_fatal._unmodified = lambda: f._bytecode() == f._ORIG_BYTECODE
def optional_fatal_bytecode_check():
assert optional_fatal._unmodified(), (
"Detected modified bytecode. This should never happen.")
number_of_fatals = itertools.count()
total = itertools.count()
def maybe_fatal():
try:
# Throw exceptions 80% of the time
optional_fatal(random() < 0.8)
except rootpy.ROOTError:
number_of_fatals.next()
finally:
total.next()
optional_fatal_bytecode_check()
def randomfatal(should_exit):
while not should_exit.is_set():
maybe_fatal()
#@EnsureLogContains("ERROR", "ALWAYSABORT")
def test_multithread_exceptions():
should_exit = threading.Event()
sup_logger = log["/ROOT.rootpy.logger.test"]
old_level = sup_logger.level
# Suppress test warnings
sup_logger.setLevel(log.CRITICAL)
    # Run for 1/4 second, or for TEST_TIME seconds if set in the environment
length = float(os.environ.get("TEST_TIME", 0.25))
try:
threads = []
for i in range(100):
t = threading.Thread(target=randomfatal, args=(should_exit,))
t.start()
threads.append(t)
time.sleep(length)
should_exit.set()
for t in threads:
t.join()
finally:
#sup_logger.setLevel(old_level)
pass
tot = total.next()-1
fatals = number_of_fatals.next()-1
log.debug("Success raising exceptions: total: {0} (fatals {1:%})".format(tot, fatals / tot))
|
Python
| 0
|
@@ -64,30 +64,82 @@
ort
-threading%0Aimport time%0A
+resource%0Aimport thread%0Aimport threading%0Aimport time%0A%0Afrom math import ceil
%0Afro
@@ -162,16 +162,29 @@
random%0A%0A
+import ROOT%0A%0A
import r
@@ -282,29 +282,16 @@
= True%0A%0A
-import ROOT%0A%0A
from .lo
@@ -911,17 +911,16 @@
atal():%0A
-%0A
try:
@@ -1231,51 +1231,329 @@
()%0A%0A
-#@EnsureLogContains(%22ERROR%22, %22ALWAYSABORT%22)
+def spareprocs():%0A %22%22%22%0A Compute the maximum number of threads we can start up according to ulimit%0A %22%22%22%0A nmax, _ = resource.getrlimit(resource.RLIMIT_NPROC)%0A me = os.geteuid()%0A return nmax - sum(1 for p in os.listdir(%22/proc%22)%0A if p.isdigit() and os.stat(%22/proc/%22 + p).st_uid == me)%0A
%0Adef
@@ -1951,11 +1951,45 @@
nge(
-100
+min(100, int(ceil(spareprocs()*0.8)))
):%0A
@@ -2061,16 +2061,37 @@
exit,))%0A
+ try:%0A
@@ -2100,24 +2100,28 @@
t.start()%0A
+
@@ -2137,16 +2137,204 @@
ppend(t)
+%0A except thread.error:%0A log.warning(%22Unable to start thread%22)%0A break%0A %0A assert threads, %22Didn't manage to start any threads!%22
%0A%0A
@@ -2450,17 +2450,16 @@
-#
sup_logg
@@ -2485,22 +2485,8 @@
el)%0A
- pass%0A%0A
%0A
@@ -2554,18 +2554,14 @@
-log.debug(
+fmt =
%22Suc
@@ -2583,16 +2583,31 @@
ceptions
+ in %7B0%7D threads
: total:
@@ -2608,17 +2608,17 @@
total: %7B
-0
+1
%7D (fatal
@@ -2624,22 +2624,54 @@
ls %7B
-1
+2
:%25%7D)%22
-.format(
+%0A log.debug(fmt.format(len(threads),
tot,
@@ -2685,8 +2685,10 @@
/ tot))
+%0A%0A
|
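Decoded, the patch imports resource, thread, and math.ceil, and adds a helper that estimates how many more processes the current user may start under RLIMIT_NPROC by subtracting the user's live /proc entries from the soft limit; the test then caps its thread count at 80% of that headroom and tolerates failed thread starts:

def spareprocs():
    """
    Compute the maximum number of threads we can start up according to ulimit
    """
    nmax, _ = resource.getrlimit(resource.RLIMIT_NPROC)
    me = os.geteuid()
    return nmax - sum(1 for p in os.listdir("/proc")
                      if p.isdigit() and os.stat("/proc/" + p).st_uid == me)

...

for i in range(min(100, int(ceil(spareprocs() * 0.8)))):
    t = threading.Thread(target=randomfatal, args=(should_exit,))
    try:
        t.start()
        threads.append(t)
    except thread.error:
        log.warning("Unable to start thread")
        break

assert threads, "Didn't manage to start any threads!"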
998e2a82d1f70411daa5f17909a491f0fa011ba1
|
remove periodic_interval, it is duplicated in common/config.py
|
senlin/profiles/base.py
|
senlin/profiles/base.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_config import cfg
from senlin.db import api as db_api
from senlin.engine import environment
from senlin.openstack.common import log as logging
interval_opts = [
cfg.IntOpt('healthy_check_interval',
default=60,
help='Interval in seconds for polling healthy status')
]
CONF = cfg.CONF
CONF.register_opts(interval_opts)
LOG = logging.getLogger(__name__)
class Profile(object):
'''Base class for profiles.'''
def __new__(cls, type_name, name, **kwargs):
'''Create a new profile of the appropriate class.'''
if cls != Profile:
ProfileClass = cls
else:
ProfileClass = environment.global_env().get_profile(type_name)
return super(Profile, cls).__new__(ProfileClass)
def __init__(self, type_name, name, **kwargs):
'''Initialize the profile with given parameters and a JSON object.'''
self.name = name
self.type = type_name
self.id = kwargs.get('id', None)
self.permission = kwargs.get('permission', '')
self.spec = kwargs.get('spec', {})
self.tags = kwargs.get('tags', {})
self.created_time = kwargs.get('created_time', None)
self.updated_time = kwargs.get('updated_time', None)
self.deleted_time = kwargs.get('deleted_time', None)
self.context = kwargs.get('context', None)
@classmethod
def from_db_record(cls, context, record):
'''Construct a profile object from database record.
:param context: the context used for DB operations.
:param record: a DB Profle object that contains all required fields.
'''
kwargs = {
'id': record.id,
'spec': record.spec,
'permission': record.permission,
'tags': record.tags,
'created_time': record.created_time,
'updated_time': record.updated_time,
'deleted_time': record.deleted_time,
'context': context,
}
return cls(record.type, record.name, **kwargs)
@classmethod
def load(cls, context, profile_id):
'''Retrieve a profile object from database.'''
record = db_api.profile_get(context, profile_id)
return cls.from_db_record(context, record)
@classmethod
def load_all(cls, context, limit=None, sort_keys=None, marker=None,
sort_dir=None, filters=None, show_deleted=False):
'''Retrieve all profiles from database.'''
records = db_api.profile_get_all(context, limit=limit, marker=marker,
sort_keys=sort_keys,
sort_dir=sort_dir,
filters=filters,
show_deleted=show_deleted)
for record in records:
yield cls.from_db_record(context, record)
@classmethod
def delete(cls, context, profile_id):
db_api.profile_delete(context, profile_id)
def store(self, context):
'''Store the profile into database and return its ID.'''
values = {
'name': self.name,
'type': self.type,
'spec': self.spec,
'permission': self.permission,
'tags': self.tags,
'created_time': datetime.datetime.utcnow()
}
profile = db_api.profile_create(context, values)
self.id = profile.id
return profile.id
@classmethod
def create_object(cls, context, obj):
profile = cls.load(context, obj.profile_id)
return profile.do_create(obj)
@classmethod
def delete_object(cls, context, obj):
profile = cls.load(context, obj.profile_id)
return profile.do_delete(obj)
@classmethod
def update_object(cls, context, obj, new_profile_id):
profile = cls.load(context, obj.profile_id)
new_profile = cls.load(context, new_profile_id)
return profile.do_update(obj, new_profile)
def do_create(self, obj):
'''For subclass to override.'''
return NotImplemented
def do_delete(self, obj):
'''For subclass to override.'''
return NotImplemented
def do_update(self, obj, new_profile):
'''For subclass to override.'''
return NotImplemented
def do_check(self, obj):
'''For subclass to override.'''
return NotImplemented
    def healthy_check(self, context):
pass
def to_dict(self):
pb_dict = {
'id': self.id,
'name': self.name,
'type': self.type,
'permission': self.permission,
'spec': self.spec,
'tags': self.tags,
'created_time': self.created_time,
'updated_time': self.updated_time,
'deleted_time': self.deleted_time,
}
return pb_dict
@classmethod
def from_dict(cls, **kwargs):
return cls(kwargs)
|
Python
| 0.000001
|
@@ -718,217 +718,8 @@
ng%0A%0A
-interval_opts = %5B%0A cfg.IntOpt('healthy_check_interval',%0A default=60,%0A help='Interval in seconds for polling healthy status')%0A%5D%0A%0ACONF = cfg.CONF%0ACONF.register_opts(interval_opts)%0A
%0ALOG
|
4bef39d1344a832d9a6acfb52173ac493e238139
|
bump version
|
publicprize/config.py
|
publicprize/config.py
|
# -*- coding: utf-8 -*-
""" Flask configuration.
:copyright: Copyright (c) 2014 Bivio Software, Inc. All Rights Reserved.
:license: Apache, see LICENSE for more details.
"""
import os
def _config_from_environ(cfg, prefix):
for k in cfg.keys():
ek = prefix + '_' + k.upper()
if isinstance(cfg[k], dict):
_config_from_environ(cfg[k], ek)
elif ek in os.environ:
t = type(cfg[k])
v = os.environ[ek]
if issubclass(t, (int, bool)):
v = t(v)
cfg[k] = v
def _read_json(filename):
"""Read filename for json"""
with open(filename) as f:
import json
return json.load(f)
class Config(object):
"""Configuration driven off environment variables"""
    # DO NOT set cfg['SERVER_NAME']; it breaks uwsgi so nothing is found.
# see code in manager.send_event_vote_invites so it can use url_for.
import locale
locale.setlocale(locale.LC_ALL, '')
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
PUBLICPRIZE = _read_json(os.environ.get('PUBLICPRIZE_JSON', 'config.json'))
_config_from_environ(PUBLICPRIZE, 'PUBLICPRIZE')
for k in ['DEBUG', 'ALL_PUBLIC_CONTESTANTS', 'TEST_USER', 'MAIL_DEBUG', 'MAIL_SUPPRESS_SEND']:
if PUBLICPRIZE.get(k, None) is None:
PUBLICPRIZE[k] = PUBLICPRIZE['TEST_MODE']
MAIL_SUPPRESS_SEND = PUBLICPRIZE['MAIL_SUPPRESS_SEND']
import paypalrestsdk
paypalrestsdk.configure(PUBLICPRIZE['PAYPAL'])
SECRET_KEY = PUBLICPRIZE['SECRET_KEY']
DEBUG = PUBLICPRIZE['TEST_MODE']
# Avoid message: "adds significant overhead..."
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = \
'postgresql://{user}:{password}@/{name}'.format(**PUBLICPRIZE['DATABASE'])
if PUBLICPRIZE.get('SQLALCHEMY_ECHO') is not None:
SQLALCHEMY_ECHO = PUBLICPRIZE['SQLALCHEMY_ECHO']
if PUBLICPRIZE.get('WTF_CSRF_TIME_LIMIT') is not None:
WTF_CSRF_TIME_LIMIT = PUBLICPRIZE['WTF_CSRF_TIME_LIMIT']
if PUBLICPRIZE.get('WTF_CSRF_ENABLED') is not None:
WTF_CSRF_ENABLED = PUBLICPRIZE['WTF_CSRF_ENABLED']
MAIL_DEFAULT_SENDER = PUBLICPRIZE['SUPPORT_EMAIL']
MAIL_DEBUG = PUBLICPRIZE['MAIL_DEBUG']
PROPAGATE_EXCEPTIONS = True
PUBLICPRIZE['APP_VERSION'] = '20170913.214500'
if PUBLICPRIZE['TEST_MODE']:
import datetime
PUBLICPRIZE['APP_VERSION'] = datetime.datetime.utcnow().strftime(
'%Y%m%d.%H%M%S')
|
Python
| 0
|
@@ -2300,14 +2300,14 @@
7091
-3.2145
+4.0007
00'%0A
|
65ecd399ea82abdafd0a2471193a9c850b50db87
|
Debug level of logging
|
playoff.py
|
playoff.py
|
import traceback
from jfr_playoff.filemanager import PlayoffFileManager
from jfr_playoff.generator import PlayoffGenerator
from jfr_playoff.settings import PlayoffSettings
def main():
interactive = False
try:
import argparse
arg_parser = argparse.ArgumentParser(
description='Generate play-off HTML for JFR Teamy tournaments')
output_args = arg_parser.add_mutually_exclusive_group()
output_args.add_argument('-v', '--verbose', action='store_true',
help='display debug info on STDERR')
output_args.add_argument('-q', '--quiet', action='store_true',
help='suppress warnings on STDERR')
arg_parser.add_argument('config_file', metavar='JSON_FILE',
help='path to config JSON file',
type=str, nargs='?', default=None)
arguments = arg_parser.parse_args()
settings = PlayoffSettings(arguments.config_file)
interactive = settings.interactive
generator = PlayoffGenerator(settings)
content = generator.generate_content()
file_manager = PlayoffFileManager(settings)
file_manager.write_content(content)
file_manager.copy_scripts()
file_manager.send_files()
except SystemExit:
interactive = False
raise
except:
print traceback.format_exc()
finally:
if interactive:
raw_input('Press any key to continue...')
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -490,32 +490,168 @@
n='store_true',%0A
+ help='display info on STDERR')%0A output_args.add_argument('-vv', '--debug', action='store_true',%0A
|
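Decoded, the hunk splits the old -v flag in two: -v keeps action='store_true' but its help shrinks to plain info, while a new mutually exclusive -vv/--debug flag takes over the old debug wording:

output_args.add_argument('-v', '--verbose', action='store_true',
                         help='display info on STDERR')
output_args.add_argument('-vv', '--debug', action='store_true',
                         help='display debug info on STDERR')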
d20a7e4314745529dd071b62ae1838264fb58e42
|
Change AuthenticationForm to get username field type from User model
|
authtools/forms.py
|
authtools/forms.py
|
from __future__ import unicode_literals
from django import forms, VERSION as DJANGO_VERSION
from django.contrib.auth.forms import (
ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget,
PasswordResetForm as OldPasswordResetForm,
UserChangeForm as DjangoUserChangeForm,
AuthenticationForm as DjangoAuthenticationForm,
)
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import identify_hasher
from django.utils.translation import ugettext_lazy as _, ugettext
from django.utils.html import format_html
User = get_user_model()
def is_password_usable(pw):
# like Django's is_password_usable, but only checks for unusable
# passwords, not invalidly encoded passwords too.
try:
# 1.5
from django.contrib.auth.hashers import UNUSABLE_PASSWORD
return pw != UNUSABLE_PASSWORD
except ImportError:
# 1.6
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX
return not pw.startswith(UNUSABLE_PASSWORD_PREFIX)
class BetterReadOnlyPasswordHashWidget(ReadOnlyPasswordHashWidget):
"""
A ReadOnlyPasswordHashWidget that has a less intimidating output.
"""
def render(self, name, value, attrs):
try:
from django.forms.utils import flatatt
except ImportError:
from django.forms.util import flatatt # Django < 1.7
final_attrs = flatatt(self.build_attrs(attrs))
if not value or not is_password_usable(value):
summary = ugettext("No password set.")
else:
try:
identify_hasher(value)
except ValueError:
summary = ugettext("Invalid password format or unknown"
" hashing algorithm.")
else:
summary = ugettext('*************')
return format_html('<div{attrs}><strong>{summary}</strong></div>',
attrs=final_attrs, summary=summary)
class UserCreationForm(forms.ModelForm):
"""
A form for creating new users. Includes all the required
fields, plus a repeated password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
'duplicate_username': _("A user with that %(username)s already exists."),
}
password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above,"
" for verification."))
class Meta:
model = User
fields = (User.USERNAME_FIELD,) + tuple(User.REQUIRED_FIELDS)
def __init__(self, *args, **kwargs):
super(UserCreationForm, self).__init__(*args, **kwargs)
def validate_uniqueness_of_username_field(value):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
try:
User._default_manager.get_by_natural_key(value)
except User.DoesNotExist:
return value
raise forms.ValidationError(self.error_messages['duplicate_username'] % {
'username': User.USERNAME_FIELD,
})
self.fields[User.USERNAME_FIELD].validators.append(validate_uniqueness_of_username_field)
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(self.error_messages['password_mismatch'])
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class CaseInsensitiveUsernameFieldCreationForm(UserCreationForm):
"""
This form is the same as UserCreationForm, except that usernames are lowercased before they
are saved. This is to disallow the existence of email address usernames which differ only in
case.
"""
def clean_USERNAME_FIELD(self):
username = self.cleaned_data.get(User.USERNAME_FIELD)
if username:
username = username.lower()
return username
# set the correct clean method on the class so that child classes can override and call super()
setattr(
CaseInsensitiveUsernameFieldCreationForm,
'clean_' + User.USERNAME_FIELD,
CaseInsensitiveUsernameFieldCreationForm.clean_USERNAME_FIELD
)
# alias for the old name for backwards-compatability
CaseInsensitiveEmailUserCreationForm = CaseInsensitiveUsernameFieldCreationForm
class UserChangeForm(forms.ModelForm):
"""
A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField(label=_("Password"),
widget=BetterReadOnlyPasswordHashWidget)
class Meta:
model = User
if DJANGO_VERSION >= (1, 6):
fields = '__all__'
def __init__(self, *args, **kwargs):
super(UserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions', None)
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class AdminUserChangeForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super(AdminUserChangeForm, self).__init__(*args, **kwargs)
if not self.fields['password'].help_text:
self.fields['password'].help_text = \
DjangoUserChangeForm.base_fields['password'].help_text
class FriendlyPasswordResetForm(OldPasswordResetForm):
error_messages = dict(getattr(OldPasswordResetForm, 'error_messages', {}))
error_messages['unknown'] = _("This email address doesn't have an "
"associated user account. Are you "
"sure you've registered?")
def clean_email(self):
super_clean_email = getattr(
super(FriendlyPasswordResetForm, self), 'clean_email', None)
if callable(super_clean_email): # Django == 1.5
# Django 1.5 sets self.user_cache
return super_clean_email()
# Simulate Django 1.5 behavior in Django >= 1.6.
# This is not as efficient as in Django 1.5, since clean_email() and
# save() will be running the same query twice.
# Whereas Django 1.5 just caches it.
email = self.cleaned_data['email']
qs = User._default_manager.filter(is_active=True, email__iexact=email)
results = [user for user in qs if user.has_usable_password()]
if not results:
raise forms.ValidationError(self.error_messages['unknown'])
return email
class AuthenticationForm(DjangoAuthenticationForm):
def __init__(self, request=None, *args, **kwargs):
super(AuthenticationForm, self).__init__(request, *args, **kwargs)
self.fields['username'].widget = forms.EmailInput()
|
Python
| 0
|
@@ -7613,56 +7613,138 @@
-self.fields%5B'username'%5D.widget = forms.EmailInput()
+username_field = User._meta.get_field(User.USERNAME_FIELD)%0A self.fields%5B'username'%5D.widget = username_field.formfield().widget
%0A
|
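Decoded, the hardcoded forms.EmailInput() is replaced with whatever widget the User model's USERNAME_FIELD would use, so custom user models whose username is not an email render the right input:

username_field = User._meta.get_field(User.USERNAME_FIELD)
self.fields['username'].widget = username_field.formfield().widget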
3f80c759c55552dce7d45cf5f84e953ac7863974
|
add placeholder for more examples
|
octopus/modules/examples/examples.py
|
octopus/modules/examples/examples.py
|
from octopus.core import app
from flask import Blueprint, render_template
blueprint = Blueprint('examples', __name__)
#@blueprint.route("/")
#def list_examples():
# return render_template("examples/list.html")
@blueprint.route("/ac")
def autocomplete():
return render_template("examples/es/autocomplete.html")
@blueprint.route("/fact")
def fact():
return render_template("examples/sherpafact/proxy.html")
|
Python
| 0
|
@@ -406,16 +406,264 @@
ct/proxy.html%22)%0A
+%0A@blueprint.route(%22/clientjs%22)%0Adef clientjs():%0A pass%0A%0A@blueprint.route(%22/epmc%22)%0Adef epmc():%0A pass%0A%0A@blueprint.route(%22/romeo%22)%0Adef romeo():%0A # at the moment the romeo endpoint only deals with downloads, which is not very demoable%0A pass%0A%0A
|
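Decoded, the patch appends three stub routes to the blueprint as placeholders for future examples:

@blueprint.route("/clientjs")
def clientjs():
    pass

@blueprint.route("/epmc")
def epmc():
    pass

@blueprint.route("/romeo")
def romeo():
    # at the moment the romeo endpoint only deals with downloads, which is not very demoable
    pass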
87138c86f8ac98b79e523d5aac89a8e86b519eeb
|
Fix failing build - test deliberately moved to error for some investigations. Fixed now.
|
tests/app/main/views/test_authentication.py
|
tests/app/main/views/test_authentication.py
|
from flask import json
from client.jwt import create_jwt_token
def test_should_not_allow_request_with_no_token(notify_api):
response = notify_api.test_client().get("/")
assert response.status_code == 401
data = json.loads(response.get_data())
assert data['error'] == 'Unauthorized, authentication token must be provided'
def test_should_not_allow_request_with_incorrect_header(notify_api):
response = notify_api.test_client().get(
"/",
headers={
'Authorization': 'Basic 1234'
}
)
assert response.status_code == 401
data = json.loads(response.get_data())
assert data['error'] == 'Unauthorized, authentication bearer scheme must be used'
def test_should_not_allow_request_with_incorrect_token(notify_api):
response = notify_api.test_client().get(
"/",
headers={
'Authorization': 'Bearer 1234'
}
)
assert response.status_code == 403
data = json.loads(response.get_data())
assert data['error'] == 'Invalid token: signature'
def test_should_not_allow_incorrect_path(notify_api):
token = create_jwt_token(request_method="GET", request_path="/bad", secret="secret", client_id="client_id")
response = notify_api.test_client().get(
"/",
headers={
'Authorization': "Bearer {}".format(token)
}
)
assert response.status_code == 403
data = json.loads(response.get_data())
assert data['error'] == 'Invalid token: request'
def test_should_not_allow_incorrect_method(notify_api):
token = create_jwt_token(request_method="POST", request_path="/", secret="secret", client_id="client_id")
response = notify_api.test_client().get(
"/",
headers={
'Authorization': "Bearer {}".format(token)
}
)
assert response.status_code == 403
data = json.loads(response.get_data())
assert data['error'] == 'Invalid token: request'
def test_should_not_allow_invalid_secret(notify_api):
token = create_jwt_token(request_method="POST", request_path="/", secret="not-so-secret", client_id="client_id")
response = notify_api.test_client().get(
"/",
headers={
'Authorization': "Bearer {}".format(token)
}
)
assert response.status_code == 403
data = json.loads(response.get_data())
assert data['error'] == 'Invalid token: signature'
def test_should_allow_valid_token(notify_api):
token = create_jwt_token(request_method="GET", request_path="/", secret="secret", client_id="client_id")
response = notify_api.test_client().get(
"/",
headers={
'Authorization': 'Bearer {}'.format(token)
}
)
assert response.status_code == 201
def test_should_allow_valid_token_with_post_body(notify_api):
json_body = json.dumps({
"key1": "value1",
"key2": "value2",
"key3": "value3"
})
token = create_jwt_token(
request_method="POST",
request_path="/",
secret="secret",
client_id="client_id",
request_body=json_body
)
response = notify_api.test_client().post(
"/",
data=json_body,
headers={
'Authorization': 'Bearer {}'.format(token)
}
)
assert response.status_code == 200
def test_should_not_allow_valid_token_with_invalid_post_body(notify_api):
json_body = json.dumps({
"key1": "value1",
"key2": "value2",
"key3": "value3"
})
token = create_jwt_token(
request_method="POST",
request_path="/",
secret="secret",
client_id="client_id",
request_body=json_body
)
response = notify_api.test_client().post(
"/",
data="spurious",
headers={
'Authorization': 'Bearer {}'.format(token)
}
)
assert response.status_code == 403
data = json.loads(response.get_data())
assert data['error'] == 'Invalid token: payload'
|
Python
| 0
|
@@ -2746,17 +2746,17 @@
de == 20
-1
+0
%0A%0A%0Adef t
|
2668829d114031ba6fa641bb989988368371917b
|
add program lookup to choice group admin hotfix
|
open_programs/apps/programs/admin.py
|
open_programs/apps/programs/admin.py
|
from django.contrib import admin
from reversion.admin import VersionAdmin
from ajax_select.admin import AjaxSelectAdmin
from ajax_select import make_ajax_form
from .models import Program, TrainingTarget, ProgramCompetence, ProgramModules, TargetModules, ChoiceGroup, ChoiceGroupType, LearningPlan
@admin.register(Program)
class ProgramAdmin(VersionAdmin):
list_display = (
'title',
"training_direction",
'chief',
"level",
'created',
'updated',
'archived',
'status',
)
list_filter = ("level", 'created', 'updated', 'status', 'archived',)
filter_horizontal = ("learning_plans", )
@admin.register(TrainingTarget)
class TrainingTargetAdmin(VersionAdmin):
list_display = (
"title",
"number"
) # TODO: "program"
list_filter = (
"program",
"number"
)
@admin.register(ProgramCompetence)
class ProgramCompetenceAdmin(VersionAdmin):
list_display = ("title", "number", "program")
list_filter = ("title", "number")
search_fields = ("title", )
@admin.register(ProgramModules)
class ProgramModulesAdmin(VersionAdmin):
list_display = ("id", "semester", "module", "program", "choice_group", "competence")
list_filter = ("program", "semester",)
raw_id_fields = ("module", )
@admin.register(TargetModules)
class TargetModulesAdmin(VersionAdmin):
list_display = ("id", ) # TODO: "choice_group", "program_module", "target"
@admin.register(ChoiceGroup)
class ChoiceGroupAdmin(VersionAdmin, AjaxSelectAdmin):
list_display = ("id", "program", "title", "labor", "choice_group_type", "number")
form = make_ajax_form(Program, {'program': 'program'})
@admin.register(ChoiceGroupType)
class ChoiceGroupTypeAdmin(VersionAdmin):
list_display = ("title", )
@admin.register(LearningPlan)
class LearningPlanAdmin(VersionAdmin):
list_display = ('uni_displayableTitle', 'uni_number', 'uni_title', 'uni_stage', 'uni_loadTimeType')
|
Python
| 0
|
@@ -1665,23 +1665,27 @@
ax_form(
-Program
+ChoiceGroup
, %7B'prog
|
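Decoded, the hotfix is a one-word change: make_ajax_form must be built for the model the admin class edits (ChoiceGroup), not for the related Program model being looked up:

form = make_ajax_form(ChoiceGroup, {'program': 'program'})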
01c8f0ebc4669d88576d2e66c57ac51863fd31fe
|
Fix ignore pattern
|
autotweet/learn.py
|
autotweet/learn.py
|
""":mod:`autotweet.learn` --- Learning your tweets
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module learns your tweets and stores them in the database.
"""
import logging
import re
import sqlalchemy
import time
import tweepy
from .database import add_document, get_session
from .twitter import CONSUMER_KEY, CONSUMER_SECRET, OAuthToken, strip_tweet
MY_CLIENT_NAME = 'learn your tweet'
IGNORE_PATTERN = re.compile(r'@\w+\s{2,}')
logger = logging.getLogger('collector')
def check_ignore(status):
if hasattr(status, 'retweeted_status'):
return True
if status.source == MY_CLIENT_NAME:
return True
if IGNORE_PATTERN.match(status.text):
return True
return False
class MyMentionListener(tweepy.streaming.StreamListener):
def __init__(self, api, db_url):
super(MyMentionListener, self).__init__()
self.api = api
self.db_url = db_url
self.db_session = get_session(db_url)
self.me = api.me()
def on_status(self, status):
if check_ignore(status):
return True
if status.user.id == self.me.id and status.in_reply_to_status_id:
original_status = self.api.get_status(status.in_reply_to_status_id)
question = strip_tweet(original_status.text)
answer = strip_tweet(status.text, remove_url=False)
if question and answer:
try:
add_document(self.db_session, question, answer)
except sqlalchemy.exc.OperationalError:
self.db_session = get_session(self.db_url)
add_document(self.db_session, question, answer)
return True
def polling_timeline(api, db_url):
db_session = get_session(db_url)
me = api.me()
last_id = me.status.id
logger.debug('tracking from status id: {0}'.format(last_id))
while 1:
time.sleep(60)
logger.debug('polling from status id: {0}'.format(last_id))
statuses = me.timeline(since_id=last_id)
if statuses:
statuses.reverse()
last_id = statuses[-1].id
else:
continue
for status in statuses:
if check_ignore(status):
continue
if not status.in_reply_to_status_id:
continue
original_status = api.get_status(status.in_reply_to_status_id)
question = strip_tweet(original_status.text)
answer = strip_tweet(status.text, remove_url=False)
if question and answer:
try:
add_document(db_session, question, answer)
except sqlalchemy.exc.OperationalError:
db_session = get_session(db_url)
add_document(db_session, question, answer)
def learning_daemon(token, db_url, streaming=False):
if not isinstance(token, OAuthToken):
token = OAuthToken.from_string(token)
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(token.key, token.secret)
api = tweepy.API(auth)
if streaming:
listener = MyMentionListener(api, db_url)
stream = tweepy.Stream(auth, listener)
stream.userstream()
else:
polling_timeline(api, db_url)
|
Python
| 0.000003
|
@@ -418,16 +418,26 @@
mpile(r'
+(@%5Cw+%5Cs+)*
@%5Cw+%5Cs%7B2
|
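Decoded, the pattern gains an optional prefix of single-space-separated mentions, so tweets that open with several @handles before the double space are now ignored as well. A quick illustration (the handles are made up for the example):

import re

IGNORE_PATTERN = re.compile(r'(@\w+\s+)*@\w+\s{2,}')

IGNORE_PATTERN.match('@alice  hi')       # matched, as with the old pattern
IGNORE_PATTERN.match('@alice @bob  hi')  # now also matched; the old pattern missed it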
d632b78c4fe41fff6511a4bcbb8ec9c13a34c066
|
Add test for LogEntry
|
tests/core/tests/admin_integration_tests.py
|
tests/core/tests/admin_integration_tests.py
|
from __future__ import unicode_literals
import os.path
from django.test.testcases import TestCase
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from core.admin import BookAdmin
class ImportExportAdminIntegrationTest(TestCase):
def setUp(self):
user = User.objects.create_user('admin', 'admin@example.com',
'password')
user.is_staff = True
user.is_superuser = True
user.save()
self.client.login(username='admin', password='password')
def test_import_export_template(self):
response = self.client.get('/admin/core/book/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response,
'admin/import_export/change_list_import_export.html')
self.assertContains(response, _('Import'))
self.assertContains(response, _('Export'))
def test_import(self):
input_format = '0'
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books.csv')
with open(filename, "rb") as f:
data = {
'input_format': input_format,
'import_file': f,
}
response = self.client.post('/admin/core/book/import/', data)
self.assertEqual(response.status_code, 200)
self.assertIn('result', response.context)
self.assertFalse(response.context['result'].has_errors())
self.assertIn('confirm_form', response.context)
confirm_form = response.context['confirm_form']
data = confirm_form.initial
response = self.client.post('/admin/core/book/process_import/', data,
follow=True)
self.assertEqual(response.status_code, 200)
self.assertContains(response, _('Import finished'))
def test_export(self):
response = self.client.get('/admin/core/book/export/')
self.assertEqual(response.status_code, 200)
data = {
'file_format': '0',
}
response = self.client.post('/admin/core/book/export/', data)
self.assertEqual(response.status_code, 200)
self.assertTrue(response.has_header("Content-Disposition"))
def test_import_export_buttons_visible_without_add_permission(self):
# issue 38 - Export button not visible when no add permission
original = BookAdmin.has_add_permission
BookAdmin.has_add_permission = lambda self, request: False
response = self.client.get('/admin/core/book/')
BookAdmin.has_add_permission = original
self.assertContains(response, _('Export'))
self.assertContains(response, _('Import'))
def test_import_file_name_in_tempdir(self):
        # 65 - import_file_name form field can be used to access the filesystem
import_file_name = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books.csv')
data = {
'input_format': "0",
'import_file_name': import_file_name,
}
with self.assertRaises(IOError):
self.client.post('/admin/core/book/process_import/', data)
|
Python
| 0
|
@@ -192,16 +192,65 @@
azy as _
+%0Afrom django.contrib.admin.models import LogEntry
%0A%0Afrom c
@@ -3290,24 +3290,919 @@
process_import/', data)%0A
+%0A def test_import_log_entry(self):%0A input_format = '0'%0A filename = os.path.join(%0A os.path.dirname(__file__),%0A os.path.pardir,%0A 'exports',%0A 'books.csv')%0A with open(filename, %22rb%22) as f:%0A data = %7B%0A 'input_format': input_format,%0A 'import_file': f,%0A %7D%0A response = self.client.post('/admin/core/book/import/', data)%0A self.assertEqual(response.status_code, 200)%0A confirm_form = response.context%5B'confirm_form'%5D%0A data = confirm_form.initial%0A response = self.client.post('/admin/core/book/process_import/', data,%0A follow=True)%0A self.assertEqual(response.status_code, 200)%0A book = LogEntry.objects.latest('id')%0A self.assertEqual(book.object_repr, %22Some book%22)%0A self.assertEqual(book.object_id, str(1))%0A
|
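Decoded, the appended test_import_log_entry repeats the confirm-and-process flow of test_import and then checks that a django.contrib.admin LogEntry row was written for the imported book; the new assertions at its tail are:

book = LogEntry.objects.latest('id')
self.assertEqual(book.object_repr, "Some book")
self.assertEqual(book.object_id, str(1))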
2079876aa3ef9a0a6798dbfee7199864f7affcdb
|
check what the sequences() graph result is
|
app/modules/phylotyper/sequences.py
|
app/modules/phylotyper/sequences.py
|
"""Classes for retrieving Marker sequences
Example:
$ python sequences.py -s stx1
"""
from middleware.decorators import submit, prefix, tojson
from middleware.graphers import turtle_utils
from routes.job_utils import fetch_job
@submit
@prefix
def marker_query(marker_uris):
query = '''
SELECT ?m
WHERE {{
?m rdf:type :Marker .
VALUES ?m {{ {} }}
}}
'''.format(' '.join(marker_uris))
return query
@prefix
def sequence_query(marker_rdf, isolate_rdf):
query = '''
SELECT ?contig ?contigid ?region ?start ?len ?seq
WHERE {{
?g a :spfyId .
VALUES ?g {{ {} }}
?contig a g:Contig ;
g:Identifier ?contigid .
?m a :VirulenceFactor .
?contig g:DNASequence ?dna .
VALUES ?m {{ {} }} .
?region a faldo:Region ;
:hasPart ?m ;
:isFoundIn ?contig ;
:isFoundIn ?g ;
faldo:begin ?b ;
faldo:end ?e .
?b faldo:position ?beginPos .
?e faldo:position ?endPos .
BIND( IF(?beginPos < ?endPos,?beginPos,?endPos) as ?start)
BIND( IF(?beginPos < ?endPos,?endPos-?beginPos+1,?beginPos-?endPos+1) as ?len)
BIND( SUBSTR( ?dna, ?start, ?len ) as ?seq )
}}
'''.format(isolate_rdf, ' '.join(marker_rdf))
return query
@tojson
@submit
@prefix
def phylotyper_query(subtypescheme_rdf, isolate_rdf):
query = '''
SELECT DISTINCT ?pt ?typeLabel ?score ?region ?contigid ?beginPos ?endPos
WHERE {{
?pt a subt:PTST ;
subt:isOfPhylotyper {} ;
subt:hasIdentifiedClass ?type ;
subt:score ?score ;
typon:hasIdentifiedAllele ?a .
?type subt:subtypeValue ?typeLabel .
?a faldo:location ?region .
?region :isFoundIn {} ;
:isFoundIn ?contig ;
faldo:begin ?b ;
faldo:end ?e .
?contig a g:Contig ;
g:Identifier ?contigid .
?b faldo:position ?beginPos .
?e faldo:position ?endPos
}}
'''.format(subtypescheme_rdf, isolate_rdf)
return query
@tojson
@submit
@prefix
def genename_query(locus_rdf):
query = '''
SELECT ?markerURI ?markerLabel
WHERE {{
VALUES ?l {{ {} }} .
?markerURI :isFoundIn ?l ;
a :Marker ;
dc:description ?markerLabel
}}
'''.format(locus_rdf)
return query
class MarkerSequences(object):
"""Retrieve DNA region sequences for one or more Markers
"""
def __init__(self, markers=[':stx2A',':stx2B'], job_id=None, job_turtle=None, job_ectyper_datastruct_vf=None, redis_conn=None):
"""Constructor
Args:
"""
# convert to proper RDF terms
self.marker_uris = [turtle_utils.normalize_rdfterm(m) for m in markers]
# Retrieve and merge graphs from pre-req. jobs.
self.graph = fetch_job(job_id, redis_conn).result + fetch_job(job_turtle, redis_conn).result + fetch_job(job_ectyper_datastruct_vf, redis_conn).result
def sequences(self, genome_uri):
"""Retrieve sequences for object alleles
Args:
genome_uri(str): Genome URI
Returns:
dictionary
"""
genome_rdf = turtle_utils.normalize_rdfterm(genome_uri)
query = sequence_query(self.marker_uris, genome_rdf)
# query_result = sequence_query(self.marker_uris, genome_rdf)
query_result = self.graph.query(query)
print(query_result)
# Unroll result into dictionary with fasta-like keys
seqdict = { "spfy|{}| {}:{}..{}".format(
turtle_utils.fulluri_to_basename(r['region']),
r['contigid'], r['start'], r['start']+r['len']-1): r['seq'] for r in query_result }
return seqdict
def fasta(self, genome_uri):
"""Retrieve sequences for object alleles
Args:
genome_uri(str): Genome URI
Returns:
fasta format string or None
"""
seqdict = self.sequences(genome_uri)
if not seqdict:
return None
fasta_string = ''
for (h,s) in seqdict.iteritems():
fasta_string += ">{}\n{}\n".format(h,s)
return fasta_string
if __name__=='__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"-m",
nargs="*",
help="Marker URI",
required=True
)
parser.add_argument(
"-g",
help="Genome URI",
required=True
)
args = parser.parse_args()
ms = MarkerSequences(args.m)
print ms.sequences(args.g)
|
Python
| 0
|
@@ -3653,28 +3653,68 @@
print(
-query_result
+'sequences() query_result: %7B0%7D'.format(query_result)
)%0A%0A
|
71b6f040e161c9a169e86d7a87fdd8038cf5961e
|
Add get_all_instance_types to AZURE
|
apps/domain/src/main/core/infrastructure/providers/azure/utils.py
|
apps/domain/src/main/core/infrastructure/providers/azure/utils.py
|
import subprocess
import click
from PyInquirer import prompt
from ...utils import Config, styles
class AZ:
def locations_list(self):
proc = subprocess.Popen(
"az account list-locations --query '[].{DisplayName:displayName}' --output table",
shell=True,
stdout=subprocess.PIPE,
universal_newlines=True,
)
locations = proc.stdout.read()
return locations.split("\n")[2:]
def get_azure_config() -> Config:
"""Getting the configration required for deployment on AZURE.
Returns:
Config: Simple Config with the user inputs
"""
az = AZ()
subscription_id = prompt(
[
{
"type": "input",
"name": "subscription_id",
"message": "Please provide your subscription_id",
"default": "00000000-0000-0000-0000-000000000000",
}
],
style=styles.second,
)["subscription_id"]
client_id = prompt(
[
{
"type": "input",
"name": "client_id",
"message": "Please provide your client_id",
"default": "00000000-0000-0000-0000-000000000000",
}
],
style=styles.second,
)["client_id"]
client_secret = prompt(
[
{
"type": "input",
"name": "client_secret",
"message": "Please provide your client_secret",
"default": "XXXX-XXXX-XXX-XXX-XXX",
}
],
style=styles.second,
)["client_secret"]
tenant_id = prompt(
[
{
"type": "input",
"name": "tenant_id",
"message": "Please provide your tenant_id",
"default": "00000000-0000-0000-0000-000000000000",
}
],
style=styles.second,
)["tenant_id"]
location = prompt(
[
{
"type": "list",
"name": "location",
"message": "Please select your desired location",
"choices": az.locations_list(),
}
],
style=styles.second,
)["location"]
return Config(
location=location,
subscription_id=subscription_id,
client_id=client_id,
client_secret=client_secret,
tenant_id=tenant_id,
)
|
Python
| 0.000003
|
@@ -25,16 +25,28 @@
t click%0A
+import json%0A
from PyI
@@ -463,16 +463,384 @@
)%5B2:%5D%0A%0A%0A
+def get_all_instance_types(location=None):%0A proc = subprocess.Popen(%0A f%22az vm list-sizes --location %7Blocation%7D%22,%0A shell=True,%0A stdout=subprocess.PIPE,%0A universal_newlines=True,%0A )%0A machines = json.loads(proc.stdout.read())%0A all_instances = %7B%22all_instances%22: %5Bmachine%5B%22name%22%5D for machine in machines%5D%7D%0A return all_instances%0A%0A%0A
def get_
|
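Decoded, the patch adds `import json` beside the click import and inserts this helper, which shells out to the Azure CLI and collects the available VM size names for a location:

def get_all_instance_types(location=None):
    proc = subprocess.Popen(
        f"az vm list-sizes --location {location}",
        shell=True,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    machines = json.loads(proc.stdout.read())
    all_instances = {"all_instances": [machine["name"] for machine in machines]}
    return all_instances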
ec5cb4e878dae00bb6b23965c6c466ee29727583
|
Update HashFilter
|
pybloom/hashfilter.py
|
pybloom/hashfilter.py
|
import time
class HashFilter(object):
'''
Plain Temporal Hash Filter for testing purposes
'''
def __init__(self, expiration):
self.expiration = expiration
self.unique_items = {}
def add(self, key, timestamp = None):
if key in self.unique_items:
if not timestamp:
timestamp = time.time()
self.unique_items[key] = int(timestamp) + self.expiration
return True
else:
if not timestamp:
timestamp = time.time()
self.unique_items[key] = int(timestamp) + self.expiration
return False
def contains(self, key, timestamp):
timestamp = int(timestamp)
if key in self.unique_items:
if timestamp < self.unique_items[key]:
return True
else:
del self.unique_items[key]
return False
def __contains__(self, key):
timestamp = time.time()
if key in self.unique_items:
if timestamp < self.unique_items[key]:
return True
else:
del self.unique_items[key]
return False
|
Python
| 0
|
@@ -247,16 +247,51 @@
None):%0A
+ timestamp = int(timestamp)%0A
@@ -326,36 +326,32 @@
%0A if
-not
timestamp:%0A
@@ -347,50 +347,36 @@
tamp
-:%0A timestamp = time.time()%0A
+ %3C self.unique_items%5Bkey%5D:%0A
@@ -375,32 +375,35 @@
%5D:%0A
+
self.unique_item
@@ -403,36 +403,32 @@
ue_items%5Bkey%5D =
-int(
timestamp) + sel
@@ -412,33 +412,32 @@
key%5D = timestamp
-)
+ self.expirati
@@ -431,32 +431,36 @@
self.expiration%0A
+
retu
@@ -467,32 +467,36 @@
rn True%0A
+
else:%0A
@@ -493,74 +493,8 @@
- if not timestamp:%0A timestamp = time.time()%0A
@@ -526,20 +526,16 @@
%5Bkey%5D =
-int(
timestam
@@ -535,17 +535,16 @@
imestamp
-)
+ self.
@@ -558,32 +558,36 @@
ion%0A
+
+
return False%0A%0A
@@ -587,101 +587,33 @@
lse%0A
-%0A def contains(self, key, timestamp):%0A timestamp = int(timestamp)%0A if key in
+ else:%0A
sel
@@ -626,32 +626,23 @@
ue_items
-:%0A if
+%5Bkey%5D =
timesta
@@ -648,127 +648,26 @@
amp
-%3C
++
self.
-unique_items%5Bkey%5D:%0A return True%0A else:%0A del self.unique_items%5Bkey%5D%0A
+expiration%0A
@@ -700,18 +700,16 @@
def
-__
contains
__(s
@@ -704,18 +704,16 @@
contains
-__
(self, k
@@ -714,16 +714,27 @@
elf, key
+, timestamp
):%0A
@@ -752,18 +752,21 @@
p =
-time.time(
+int(timestamp
)%0A
@@ -949,29 +949,28 @@
return False%0A
-%0A
|
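This patch is dense, so the following is a best-effort decoding (whitespace reconstructed by hand; treat it as a sketch, not gospel): add() now refreshes the expiry on every call and returns True only when the key was present and unexpired, while the old __contains__ becomes contains() with an explicit timestamp parameter:

def add(self, key, timestamp=None):
    timestamp = int(timestamp)  # note: the patched code would raise TypeError on the default None
    if key in self.unique_items:
        if timestamp < self.unique_items[key]:
            self.unique_items[key] = timestamp + self.expiration
            return True
        else:
            self.unique_items[key] = timestamp + self.expiration
            return False
    else:
        self.unique_items[key] = timestamp + self.expiration
    return False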
ab13290364a40c0592ed347bf7b91110afaa7115
|
Fix test_json
|
openfisca_france/tests/test_jsons.py
|
openfisca_france/tests/test_jsons.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########### DESCRIPTION ############
## This program tests every .json file created by a script and reports the OpenFisca errors
import json
import os
import sys
from biryani1.baseconv import check
import numpy as np
import openfisca_france
from openfisca_france.scripts.compare_openfisca_impots import compare_variable
TaxBenefitSystem = openfisca_france.init_country()
tax_benefit_system = TaxBenefitSystem()
def test():
path = os.path.join(os.path.dirname(__file__), 'json')
err = 1
for fichier in os.listdir(path):
with open(os.path.join(path, fichier)) as officiel:
try:
content = json.load(officiel)
except:
print fichier
official_result = content['resultat_officiel']
json_scenario = content['scenario']
scenario = check(tax_benefit_system.Scenario.make_json_to_instance(
tax_benefit_system = tax_benefit_system))(json_scenario)
year = json_scenario['year']
totpac = scenario.test_case['foyers_fiscaux'].values()[0].get('personnes_a_charge')
simulation = scenario.new_simulation()
for code, field in official_result.iteritems():
if compare_variable(code, field, simulation, totpac, fichier, year):
err = 0
assert err, "Erreur"
if __name__ == "__main__":
sys.exit(test())
|
Python
| 0.998619
|
@@ -1934,16 +1934,88 @@
'year'%5D%0A
+ print scenario%0A# print scenario.test_case.keys()%0A
@@ -2027,33 +2027,34 @@
otpac = scenario
-.
+%5B'
test_case%5B'foyer
@@ -2046,16 +2046,18 @@
est_case
+'%5D
%5B'foyers
|
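Decoded, the fix adds a debug print and switches the scenario lookup from attribute access to dict-style access:

print scenario
totpac = scenario['test_case']['foyers_fiscaux'].values()[0].get('personnes_a_charge')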
21ecb742db85801aa2ac74fcfd2b338b6a396dee
|
fix linter
|
tests/unit/modules/test_libcloud_storage.py
|
tests/unit/modules/test_libcloud_storage.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''
# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.libcloud_storage as libcloud_storage
try:
from libcloud.storage.base import Container, BaseDriver, Object
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
if HAS_LIBCLOUD:
class MockStorageDriver(BaseDriver):
def __init__(self):
self._TEST_CONTAINER = Container(name='test_container', extra={}, driver=self)
self._TEST_OBJECT = Object(name='test_obj',
size=1234,
hash='123sdfsdf',
extra={},
meta_data={'key': 'value'},
container=self._TEST_CONTAINER,
driver=self)
def list_containers(self):
return [self._TEST_CONTAINER]
def get_container(self, container_name):
assert container_name == 'test_container'
return self._TEST_CONTAINER
def list_container_objects(self, container):
assert container.name == 'test_container'
return [self._TEST_OBJECT]
def create_container(self, container_name):
assert container_name == 'new_test_container'
return self._TEST_CONTAINER
def get_container_object(self, container_name, object_name):
assert container_name == 'test_container'
assert object_name == 'test_obj'
return self._TEST_OBJECT
def get_mock_driver():
return MockStorageDriver()
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not HAS_LIBCLOUD, 'No libcloud installed')
@patch('salt.modules.libcloud_storage._get_driver',
MagicMock(return_value=MockStorageDriver()))
class LibcloudStorageModuleTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
module_globals = {
'__salt__': {
'config.option': MagicMock(return_value={
'test': {
'driver': 'test',
'key': '2orgk34kgk34g'
}
})
}
}
if libcloud_storage.HAS_LIBCLOUD is False:
module_globals['sys.modules'] = {'libcloud': MagicMock()}
return {libcloud_storage: module_globals}
def test_module_creation(self):
client = libcloud_storage._get_driver('test')
self.assertFalse(client is None)
def test_init(self):
with patch('salt.utils.compat.pack_dunder', return_value=False) as dunder:
libcloud_storage.__init__(None)
dunder.assert_called_with('salt.modules.libcloud_storage')
def test_list_containers(self):
containers = libcloud_storage.list_containers('test')
self.assertEqual(len(containers), 1)
self.assertEqual(containers[0]['name'], 'test_container')
def test_list_container_objects(self):
objects = libcloud_storage.list_container_objects('test_container', 'test')
self.assertEqual(len(objects), 1)
self.assertEqual(objects[0]['name'], 'test_obj')
self.assertEqual(objects[0]['size'], 1234)
def test_create_container(self):
container = libcloud_storage.create_container('new_test_container', 'test')
self.assertEqual(container['name'], 'test_container')
def test_get_container(self):
container = libcloud_storage.get_container('test_container', 'test')
self.assertEqual(container['name'], 'test_container')
def test_get_container_object(self):
obj = libcloud_storage.get_container_object('test_container', 'test_obj', 'test')
self.assertEqual(obj['name'], 'test_obj')
self.assertEqual(obj['size'], 1234)
|
Python
| 0.000002
|
@@ -691,24 +691,49 @@
nit__(self):
+ # pylint: disable=W0231
%0A
|
72ec0d82bfa59d14dbd9e8ffd89ddcfc990fc4fe
|
Fix #14
|
pygraphml/__init__.py
|
pygraphml/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from .attribute import Attribute
from .item import Item
from .point import Point
from .node import Node
from .edge import Edge
from .graph import Graph
from .graphml_parser import GraphMLParser
__version__ = '2.1.3'
|
Python
| 0.000001
|
@@ -386,7 +386,7 @@
2.1.
-3
+4
'%0A
|
145dc7d22301cffdf43498924f1a908f0d160512
|
Allow deep recursion
|
pylint_runner/main.py
|
pylint_runner/main.py
|
#!/usr/bin/env python
# pylint: disable=no-member
"""
Runs pylint on all contained python files in this directory, printint out
nice colorized warnings/errors without all the other report fluff
"""
from __future__ import print_function
import os
import sys
import colorama
import pylint
import pylint.lint
__author__ = "Matthew 'MasterOdin' Peveler"
__license__ = "The MIT License (MIT)"
IGNORE_FOLDERS = [".git", ".idea", "__pycache__"]
ARGS = ["--reports=n", "--output-format=colorized", "--disable=locally-disabled"]
colorama.init(autoreset=True)
def runner(output=None, error=None):
"""
Runs pylint on all python files in the current directory
"""
pylint_output = output if output is not None else sys.stdout
pylint_error = error if error is not None else sys.stderr
savedout, savederr = sys.__stdout__, sys.__stderr__
sys.stdout = pylint_output
sys.stderr = pylint_error
pylint_files = get_files_from_dir(os.curdir)
version = '.'.join([str(x) for x in sys.version_info[0:3]])
print("Using pylint " + colorama.Fore.RED + pylint.__version__ +
colorama.Fore.RESET + " for python " + colorama.Fore.RED +
version + colorama.Fore.RESET)
print("pylint running on the following files:")
for pylint_file in pylint_files:
split_file = pylint_file.split("/")
split_file[-1] = colorama.Fore.CYAN + split_file[-1] + colorama.Fore.RESET
pylint_file = '/'.join(split_file)
print("- " + pylint_file)
print("----")
run = pylint.lint.Run(ARGS + pylint_files, exit=False)
sys.stdout = savedout
sys.stderr = savederr
sys.exit(run.linter.msg_status)
def get_files_from_dir(current_dir):
"""
    Recursively walk through a directory, collecting all Python files, and then
    walk through any directories found off the current directory, so long as
    they are not within IGNORE_FOLDERS
:return: all python files that were found off current_dir
"""
if current_dir[-1] != "/" and current_dir != ".":
current_dir += "/"
files = []
for dir_file in os.listdir(current_dir):
if current_dir != ".":
file_path = current_dir + dir_file
else:
file_path = dir_file
if os.path.isfile(file_path):
file_split = os.path.splitext(dir_file)
if len(file_split) == 2 and file_split[0] != "" \
and file_split[1] == '.py':
files.append(file_path)
elif os.path.isdir(dir_file) and dir_file not in IGNORE_FOLDERS:
path = dir_file+"/"
if current_dir != "" and current_dir != ".":
path = current_dir.rstrip("/")+"/"+path
files += get_files_from_dir(path)
return files
|
Python
| 0.999987
|
@@ -2495,16 +2495,17 @@
elif
+(
os.path.
@@ -2519,16 +2519,45 @@
ir_file)
+ or os.path.isdir(file_path))
and dir
@@ -2612,17 +2612,19 @@
dir_file
-+
+ +
%22/%22%0A
@@ -2726,13 +2726,17 @@
%22/%22)
-+%22/%22+
+ + %22/%22 +
path
|
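Decoded, the elif now also tests the path joined with current_dir: os.path.isdir(dir_file) is resolved relative to the process working directory, so nested directory names failed the old check and recursion stopped one level deep. Spacing around the operators is tidied at the same time:

elif (os.path.isdir(dir_file) or os.path.isdir(file_path)) and dir_file not in IGNORE_FOLDERS:
    path = dir_file + "/"
    if current_dir != "" and current_dir != ".":
        path = current_dir.rstrip("/") + "/" + path
    files += get_files_from_dir(path)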
4d4531cc32f9de583f81ad41fe465059de2f8bd3
|
load data during cei test
|
pyon/util/int_test.py
|
pyon/util/int_test.py
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon, service_registry
from pyon.datastore.datastore import DatastoreManager
from pyon.event.event import EventRepository
from pyon.ion.directory import Directory
from pyon.ion.state import StateRepository
from pyon.util.containers import DotDict, dict_merge, DictModifier
from pyon.util.log import log
from mock import patch
from pyon.public import CFG
from pyon.datastore.couchdb.couchdb_datastore import CouchDB_DataStore
from pyon.datastore.mockdb.mockdb_datastore import MockDB_DataStore
from contextlib import contextmanager
import unittest
import os
# Make this call more deterministic in time.
bootstrap_pyon()
scanned_services = False
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def start_container(self):
"""
Context Manager for container in tests.
To use:
with self.start_container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
# hack to force queue auto delete on for int tests
self._turn_on_queue_auto_delete()
self._patch_out_diediedie()
db_type = os.environ.get('DB_TYPE', None)
if not db_type:
pass
elif db_type == 'MOCK':
self._turn_on_mockdb()
elif db_type == 'COUCH':
self._turn_on_couchdb()
if os.environ.get('CEI_LAUNCH_TEST', None):
self._patch_out_start_rel()
self._turn_on_couchdb()
self._turn_off_force_clean()
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
# For integration tests, if class variable "service_dependencies" exists
self._start_dependencies()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
self._force_clean()
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
def _patch_out_diediedie(self):
"""
If things are running slowly, diediedie will send a kill -9 to the owning process,
which could be the test runner! Let the test runner decide if it's time to die.
"""
patcher = patch('pyon.core.process.shutdown_or_die')
patcher.start()
self.addCleanup(patcher.stop)
def _start_dependencies(self):
"""
Starts the services declared in the class or instance variable "service_dependencies"
"""
self.clients = DotDict()
svc_deps = getattr(self, "service_dependencies", {})
log.debug("Starting service dependencies. Number=%s" % len(svc_deps))
if not svc_deps:
return
for svc in svc_deps:
config = None
if type(svc) in (tuple, list):
config = svc[1]
svc = svc[0]
# Start the service
self._start_service(svc, config=config)
# Create a client
clcls = service_registry.services[svc].simple_client
self.clients[svc] = clcls(name=svc, node=self.container.node)
log.debug("Service dependencies started")
def _start_service(self, servicename, servicecls=None, config=None):
if servicename and not servicecls:
global scanned_services
if not scanned_services:
service_registry.discover_service_classes()
scanned_services = True
assert servicename in service_registry.services, "Service %s unknown" % servicename
servicecls = service_registry.services[servicename].impl[0]
assert servicecls, "Cannot start service %s" % servicename
if type(servicecls) is str:
mod, cls = servicecls.rsplit('.', 1)
else:
mod = servicecls.__module__
cls = servicecls.__name__
self.container.spawn_process(servicename, mod, cls, config)
def _turn_on_couchdb(self):
# Called via pyon.datastore.datastore.DataStoreManager.get_datastore()
patcher = patch('pyon.datastore.datastore.DatastoreManager.persistent', True)
patcher.start()
self.addCleanup(patcher.stop)
def _turn_on_mockdb(self):
# Called via pyon.datastore.datastore.DataStoreManager.get_datastore()
patcher = patch('pyon.datastore.datastore.DatastoreManager.persistent', False)
patcher.start()
self.addCleanup(patcher.stop)
def _turn_off_force_clean(self):
# Called via pyon.datastore.datastore.DataStoreManager.get_datastore()
patcher = patch('pyon.datastore.datastore.DatastoreManager.force_clean', False)
patcher.start()
self.addCleanup(patcher.stop)
def _patch_out_start_rel(self):
def start_rel_from_url(*args, **kwargs):
return True
patcher = patch('pyon.container.apps.AppManager.start_rel_from_url', start_rel_from_url)
patcher.start()
self.addCleanup(patcher.stop)
def _force_clean(self):
from pyon.core.bootstrap import sys_name
from pyon.datastore.datastore import DatastoreManager
if DatastoreManager.persistent is None:
DatastoreManager.persistent = not CFG.system.mockdb
if not DatastoreManager.persistent:
datastore = MockDB_DataStore()
else:
datastore = CouchDB_DataStore()
dbs = datastore.list_datastores()
things_to_clean = filter(lambda x: x.startswith('%s_' % sys_name), dbs)
try:
for thing in things_to_clean:
datastore.delete_datastore(datastore_name=thing)
datastore.create_datastore(datastore_name=thing)
finally:
datastore.close()
|
Python
| 0
|
@@ -1988,16 +1988,150 @@
_clean()
+%0A from ion.processes.bootstrap.datastore_loader import DatastoreLoader%0A DatastoreLoader.load_datastore('res/dd')
%0A%0A
|
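Decoded, the hunk appends a datastore load to the CEI branch of _start_container (the 12-space indentation in the added lines places it inside that if block); a sketch of the resulting branch, assuming the delta decodes as shown — the ion.processes.bootstrap.datastore_loader import comes from the diff itself, not from this file's imports:

if os.environ.get('CEI_LAUNCH_TEST', None):
    self._patch_out_start_rel()
    self._turn_on_couchdb()
    self._turn_off_force_clean()
    from ion.processes.bootstrap.datastore_loader import DatastoreLoader
    DatastoreLoader.load_datastore('res/dd')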
3905327d8cb02c6c7929f6b3bd12658c6bc1b6ab
|
bump to 1.73
|
pyperform/__init__.py
|
pyperform/__init__.py
|
from __future__ import print_function
__version__ = '1.72'
from pyperform.benchmark import Benchmark
from .comparisonbenchmark import ComparisonBenchmark
from .benchmarkedclass import BenchmarkedClass
from .benchmarkedfunction import BenchmarkedFunction
from .timer import timer
from .exceptions import ValidationError
def enable():
"""
Enable all benchmarking.
"""
Benchmark.enable = True
ComparisonBenchmark.enable = True
BenchmarkedFunction.enable = True
BenchmarkedClass.enable = True
def disable():
"""
Disable all benchmarking.
"""
Benchmark.enable = False
ComparisonBenchmark.enable = False
BenchmarkedFunction.enable = False
BenchmarkedClass.enable = False
|
Python
| 0.000001
|
@@ -54,9 +54,9 @@
'1.7
-2
+3
'%0A%0A%0A
|
813fac88b392f81825d60f3862a09718f12bf424
|
add ccsd
|
pyquante2/__init__.py
|
pyquante2/__init__.py
|
from pyquante2.basis.basisset import basisset
from pyquante2.basis.cgbf import cgbf,sto
from pyquante2.basis.pgbf import pgbf
from pyquante2.geo.molecule import molecule
from pyquante2.geo.samples import *
from pyquante2.graphics.vtkplot import vtk_orbs
from pyquante2.grid.grid import grid
from pyquante2.ints.one import S,T,V
from pyquante2.pt.mp2 import mp2
from pyquante2.scf.hamiltonians import rhf,uhf
try:
import matplotlib
from pyquante2.graphics.lineplot import lineplot_orbs,line
from pyquante2.graphics.contourplot import contourplot
except:
pass
|
Python
| 0.000001
|
@@ -354,16 +354,51 @@
ort mp2%0A
+from pyquante2.cc.ccsd import ccsd%0A
from pyq
@@ -607,9 +607,4 @@
ass%0A
- %0A
|
89ed1ea77e2e92ae9a953404552a229854ce0f9c
|
Add option to not add routes
|
pyramid_auth/views.py
|
pyramid_auth/views.py
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPFound,
HTTPForbidden,
)
from pyramid.security import (
unauthenticated_userid,
remember,
forget,
)
from urllib import urlencode
import tw2.core as twc
from . import forms
class BaseView(object):
def __init__(self, context, request):
self.context = context
self.request = request
def forbidden(self):
return {}
class BaseLoginView(BaseView):
def get_validate_func(self):
return self.request.registry.settings[
'authentication.validate_function']
def _get_next_location(self):
login_url = self.request.route_url('login')
referrer = self.request.url
if referrer == login_url:
referrer = '/'
return self.request.params.get('next', referrer)
def login(self):
LoginForm = forms.create_login_form(self.request,
self.get_validate_func())
widget = LoginForm().req()
if self.request.method == 'POST':
try:
data = widget.validate(self.request.POST)
headers = remember(self.request, data['login'])
return HTTPFound(location=self._get_next_location(),
headers=headers)
except twc.ValidationError, e:
widget = e.widget
return dict(widget=widget)
def logout(self):
headers = forget(self.request)
location = self.request.params.get('next', self.request.application_url)
return HTTPFound(location=location, headers=headers)
def forbidden_redirect(self):
if unauthenticated_userid(self.request):
# The user is logged but doesn't have the right permission
location = self.request.route_url('forbidden')
else:
login_url = self.request.route_url('login')
location = '%s?%s' % (login_url, urlencode({'next': self.request.url}))
return HTTPFound(location=location)
def base_includeme(config):
config.add_view(
BaseView,
attr='forbidden',
context=HTTPForbidden,
renderer='auth/forbidden.mak')
def login_includeme(config):
ViewClass = BaseLoginView
config.add_view(
ViewClass,
attr='forbidden_redirect',
context=HTTPForbidden)
config.add_route(
'forbidden',
'/forbidden',
)
config.add_view(
ViewClass,
attr='forbidden',
route_name='forbidden',
renderer='auth/forbidden.mak')
config.add_route(
'login',
'/login',
)
config.add_view(
ViewClass,
attr='login',
route_name='login',
renderer='auth/login.mak')
config.add_route(
'logout',
'/logout',
)
config.add_view(
ViewClass,
attr='logout',
route_name='logout')
|
Python
| 0.000001
|
@@ -2080,24 +2080,104 @@
me(config):%0A
+ if config.registry.settings.get('authentication.no_routes'):%0A return%0A
config.a
@@ -2326,24 +2326,104 @@
me(config):%0A
+ if config.registry.settings.get('authentication.no_routes'):%0A return%0A
ViewClas
|
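Decoded, both hunks prepend the same guard to base_includeme and login_includeme so that consumers can opt out of route registration via settings; reconstructed:

def login_includeme(config):
    if config.registry.settings.get('authentication.no_routes'):
        return
    ViewClass = BaseLoginView
    # ... route and view registration continues unchanged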
d6aa9fee952b526e6423fb1020c2246e621d375e
|
Fix __repr__ method
|
pytablereader/data.py
|
pytablereader/data.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import hashlib
from .error import InvalidTableNameError
from .error import InvalidHeaderNameError
import dataproperty
import pathvalidate
def validate_table_name(name):
"""
:param str name: Table name to validate.
:raises InvalidTableNameError: |raises_validate_table_name|
"""
try:
pathvalidate.validate_sqlite_table_name(name)
except pathvalidate.InvalidReservedNameError as e:
raise InvalidTableNameError(e)
except pathvalidate.NullNameError:
raise InvalidTableNameError("table name is empty")
except pathvalidate.InvalidCharError as e:
raise InvalidTableNameError(e)
class TableData(object):
"""
Class to represent a table data structure.
"""
@property
def table_name(self):
"""
:return: The table name.
:rtype: str
"""
return self.__table_name
@property
def header_list(self):
"""
:return: List of table header names.
:rtype: list
"""
return self.__header_list
@property
def record_list(self):
"""
:return: List of table data records.
:rtype: list
"""
return self.__record_list
def __init__(self, table_name, header_list, record_list):
validate_table_name(table_name)
self.__table_name = table_name
self.__header_list = header_list
self.__record_list = record_list
self.__sanitize_header_list()
def __repr__(self):
return "table_name={}, header_list={} record_list={}".format(
self.table_name, self.header_list, self.record_list)
def __eq__(self, other):
return all([
self.table_name == other.table_name,
self.header_list == other.header_list,
self.record_list == other.record_list,
])
def __hash__(self):
body = self.table_name + str(self.header_list) + str(self.record_list)
return hashlib.sha1(body.encode("utf-8")).hexdigest()
def validate_header(self, header):
"""
No operation.
This method is called for each table header.
Override this method in a subclass if you want to detect
an invalid table header element.
Raise
:py:class:`~pytablereader.error.InvalidHeaderNameError`
if an element is invalid.
:param string header: Table header name.
"""
def rename_header(self, i):
"""
This method is called when the :py:meth:`.validate_header` method raises
:py:class:`~pytablereader.error.InvalidHeaderNameError`.
Override this method in a subclass if you want to rename an invalid
table header element.
:param int i: Table header index.
:return: Renamed header name.
:rtype: str
:raises pytablereader.error.InvalidHeaderNameError: Always raised.
"""
raise InvalidHeaderNameError(self.header_list[i])
def is_empty_record(self):
"""
:return: ``True`` if the data records of the table are empty.
:rtype: bool
"""
return dataproperty.is_empty_sequence(self.record_list)
def as_dict(self):
"""
:return: Table data as a dictionary.
:rtype: dict
"""
return {
"table_name": self.table_name,
"header_list": self.header_list,
"record_list": self.record_list,
}
def as_dataframe(self):
"""
:return: Table data as a Pandas data frame.
:rtype: pandas.DataFrame
.. note::
``Pandas`` package required to execute this method.
"""
import pandas
dataframe = pandas.DataFrame(self.record_list)
dataframe.columns = self.header_list
return dataframe
def __sanitize_header_list(self):
new_header_list = []
for i, header in enumerate(self.header_list):
try:
self.validate_header(header)
new_header = header
except InvalidHeaderNameError:
new_header = self.rename_header(i)
new_header_list.append(new_header)
self.__header_list = new_header_list
|
Python
| 0.002434
|
@@ -1665,16 +1665,17 @@
_list=%7B%7D
+,
record_
|
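The one-character hunk adds the comma that was missing between header_list and record_list in the format string; the corrected method reads:

def __repr__(self):
    return "table_name={}, header_list={}, record_list={}".format(
        self.table_name, self.header_list, self.record_list)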
b8217b623a11687bbc0f1a558217d8b63c5534c1
|
Create RLock() early to avoid exception at shutdown (#351)
|
rclpy/rclpy/handle.py
|
rclpy/rclpy/handle.py
|
# Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from threading import RLock
import weakref
from rclpy.impl.implementation_singleton import rclpy_pycapsule_implementation as _rclpy_capsule
class InvalidHandle(Exception):
pass
class Handle:
"""
Wrap a pycapsule object for thread-safe early destruction.
This is intended to be used as a context manager, meaning using the ``with`` keyword.
::
with subscription.handle as pycapsule:
...
This class assumes the passed pycapsule has a destructor.
When this class destroys the capsule, it will call the destructor.
Then it will set the destructor to NULL so it is not called a second time when the capsule is
garbage collected.
If :meth:`destroy` is never called then the pycapsule will be destructed when it is
garbage collected.
"""
def __init__(self, pycapsule):
self.__capsule = pycapsule
self.__use_count = 0
self.__request_invalidation = False
self.__valid = True
self.__rlock = RLock()
self.__required_handles = []
self.__dependent_handles = weakref.WeakSet()
self.__destroy_callbacks = []
# Called to give an opportunity to raise an exception if the object is not a pycapsule.
self.__capsule_name = _rclpy_capsule.rclpy_pycapsule_name(pycapsule)
self.__capsule_pointer = _rclpy_capsule.rclpy_pycapsule_pointer(pycapsule)
def __eq__(self, other):
return self.__capsule_pointer == other.__capsule_pointer
def __hash__(self):
return self.__capsule_pointer
@property
def name(self):
"""
Get the name of the managed pycapsule.
rclpy uses the name of the C type the pycapsule holds a pointer to.
:return: name of the pycapsule
"""
return self.__capsule_name
@property
def pointer(self):
"""
Get the address held by the managed pycapsule.
:return: address of the pycapsule
"""
return self.__capsule_pointer
def destroy(self, then=None):
"""
Destroy pycapsule as soon as possible without waiting for garbage collection.
:param then: callback to call after handle has been destroyed.
"""
with self.__rlock:
if not self.__valid:
raise InvalidHandle('Asked to destroy handle, but it was already destroyed')
if then:
self.__destroy_callbacks.append(then)
self.__request_invalidation = True
if 0 == self.__use_count:
self.__destroy()
def requires(self, req_handle):
"""
Indicate that this handle requires another handle to live longer than itself.
Calling :meth:`destroy` on the passed in handle will cause this handle to be
destroyed first.
This handle will hold a reference to the passed in handle so the required handle is
garbage collected after this handle in case :meth:`destroy` is not called.
"""
assert isinstance(req_handle, Handle)
with self.__rlock, req_handle.__rlock:
if not self.__valid:
raise InvalidHandle('Cannot require a new handle if already destroyed')
if req_handle.__valid:
self.__required_handles.append(req_handle)
req_handle.__dependent_handles.add(self)
else:
# required handle destroyed before we could link to it, destroy self
self.destroy()
def _get_capsule(self):
"""
Get the pycapsule managed by this handle.
The capsule must be returned using :meth:`_return_capsule` when it is no longer in use.
:return: PyCapsule instance
"""
with self.__rlock:
if not self.__valid:
raise InvalidHandle('Tried to use a handle that has been destroyed.')
self.__use_count += 1
return self.__capsule
def _return_capsule(self):
"""
Return the pycapsule that was previously gotten with :meth:`_get_capsule`.
:return: None
"""
with self.__rlock:
# Assume _return_capsule is not called more times than _get_capsule
assert self.__use_count > 0
self.__use_count -= 1
if 0 == self.__use_count and self.__request_invalidation:
self.__destroy()
def __enter__(self):
return self._get_capsule()
def __exit__(self, type_, value, traceback):
self._return_capsule()
def __destroy(self):
# Assume no one is using the capsule anymore
assert self.__use_count == 0
# Assume someone has asked it to be destroyed
assert self.__request_invalidation
# mark as invalid so no one else tries to use it
self.__valid = False
self.__destroy_dependents(then=self.__destroy_self)
def __destroy_dependents(self, then):
# assumes self.__rlock is held
deps_lock = RLock()
# Turn weak references to regular references
dependent_handles = [dep for dep in self.__dependent_handles]
if not dependent_handles:
# no dependents to wait on
then()
return
def remove_dependent(handle):
nonlocal dependent_handles
nonlocal deps_lock
nonlocal then
with deps_lock:
dependent_handles.remove(handle)
if 0 == len(dependent_handles):
# all dependents destroyed, do what comes next
then()
for dep in self.__dependent_handles:
try:
dep.destroy(then=remove_dependent)
except InvalidHandle:
# Dependent was already destroyed
remove_dependent(dep)
def __destroy_self(self):
with self.__rlock:
# Calls pycapsule destructor
_rclpy_capsule.rclpy_pycapsule_destroy(self.__capsule)
# Call post-destroy callbacks
while self.__destroy_callbacks:
self.__destroy_callbacks.pop()(self)
# get rid of references to other handles to break reference cycles
del self.__required_handles
|
Python
| 0
|
@@ -1589,32 +1589,155 @@
rlock = RLock()%0A
+ # Create this early because RLock() can raise during interpeter shutdown%0A self.__dependents_rlock = RLock()%0A
self.__r
@@ -5704,31 +5704,47 @@
deps_lock =
-RL
+self.__dependents_rl
ock
-()
%0A # T
|
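Decoded, the first hunk pre-creates the lock that __destroy_dependents needs, and the second reuses it instead of constructing a fresh RLock() at destruction time (which can raise during interpreter shutdown); a sketch reconstructed from the deltas:

def __init__(self, pycapsule):
    # ...
    self.__rlock = RLock()
    # Create this early because RLock() can raise during interpreter shutdown
    self.__dependents_rlock = RLock()
    # ...

def __destroy_dependents(self, then):
    # assumes self.__rlock is held
    deps_lock = self.__dependents_rlock
    # ...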
9928dc98e75d069a0f1a332502c3dfd1cb794733
|
Add new filename pattern.
|
renamer/plugins/tv.py
|
renamer/plugins/tv.py
|
import re
import urllib
from BeautifulSoup import BeautifulSoup
from renamer import plugins
@plugins.command
def find_tv_parts(env, src):
patterns = [
re.compile(r'(?P<series_name>.*?) - [sS](?P<season>\d+)[eE](?P<ep>\d+) - .*\.(?P<ext>.*?)$'), # Profiler - S01E01 - Insight.avi
re.compile(r'(?P<series_name>.*?) \[(?P<season>\d+)x(?P<ep>\d{2})\] - .*\.(?P<ext>.*?)$'), # Heroes [1x01] - Genesis.avi
re.compile(r'(?P<series_name>.*?) [sS](?P<season>\d+)[eE](?P<ep>\d{2}) .*\.(?P<ext>.*?)$'), # Heroes S01E10 HDTV XviD.avi
re.compile(r'(?P<series_name>.*?)\.(?P<season>\d)(?P<ep>\d{2}).*\.(?P<ext>.*?)$'), # heroes.108.hdtv-lol.avi
re.compile(r'(?P<series_name>.*?)\.(?P<season>\d)(?P<ep>\d{2})\.(?P<ext>.*?)$'), # arrested.development.302.avi
re.compile(r'(?P<series_name>.*?)\.[sS](?P<season>\d+)[eE](?P<ep>\d{2}).*\.(?P<ext>.*?)$'), # Heroes.S01E11.HDTV.XviD-K4RM4.avi
re.compile(r'(?P<series_name>.*?) - (?P<season>\d)(?P<ep>\d{2}).*\.(?P<ext>.*?)$'), # How I Met Your Mother - 101 - Pilot.avi
re.compile(r'(?P<series_name>.*?)\.[sS](?P<season>\d+)[eE](?P<ep>\d+).*\.(?P<ext>.*?)$'), # 24.s6e4.dvdrip.xvid-aerial.avi
re.compile(r'(?P<series_name>.*?)\.-\.(?P<season>\d)x(?P<ep>\d{2}).*\.(?P<ext>.*?)$'), # harsh.realm.-.1x01.-.pilot.avi
re.compile(r'(?P<series_name>.*?)_[sS](?P<season>\d+)[eE](?P<ep>\d+).*\.(?P<ext>.*?)$'), # DayBreak_S01E09.avi
re.compile(r'(?P<series_name>.*?) - (?P<season>\d+)[xX](?P<ep>\d+) - .*\.(?P<ext>.*?)$'), # Xena - 2x05 - Return of Callisto.avi
]
for pattern in patterns:
m = pattern.search(src)
if m is not None:
d = m.groupdict()
return d['series_name'], d['season'], d['ep'], d['ext']
raise plugins.PluginError('No patterns could be found in %r' % (src))
_epg_cache = {}
@plugins.command
def epguides(env, key, urlSegment):
URL = 'http://epguides.com/'
url = URL + urlSegment.replace(' ', '')
def gatherKeys(url):
epKeyExpr = re.compile(r'\s+(?:\d+\.)?\s+(?P<key>[\dSP]-(?:(?: \d)|(?:\d{2})))(?:\s+)?\d+')
soup = BeautifulSoup(urllib.urlopen(url))
keys = {}
contents = soup.pre.contents
while contents:
line = contents[0]
if isinstance(line, basestring):
match = epKeyExpr.search(line)
if match:
key = match.groupdict()['key'].lower()
contents.pop(0)
keys[key] = contents[0].contents[0]
contents.pop(0)
return keys
keys = _epg_cache.get(url, None)
if keys is None:
keys = _epg_cache[url] = gatherKeys(url)
key = key.lower()
try:
return keys[key]
except KeyError:
if key == '1- 1':
return keys['p- 1']
else:
raise
@plugins.command
def tvrage(env, key, showName):
qs = urllib.urlencode([('show', showName), ('ep', key)])
url = 'http://www.tvrage.com/quickinfo.php?%s' % (qs,)
data = {}
for line in urllib.urlopen(url):
key, value = line.strip().split('@', 1)
data[key] = value.split('^')
return data['Episode Info'][1]
|
Python
| 0.000001
|
@@ -1563,24 +1563,171 @@
allisto.avi%0A
+ re.compile(r'(?P%3Cseries_name%3E.*?)_(?P%3Cseason%3E%5Cd+)%5BxX%5D(?P%3Cep%3E%5Cd+)_.*%5C.(?P%3Cext%3E.*?)$'), # Xena_4x02_Adventures In The Sin Trade - Part 2.avi%0A
%5D%0A%0A f
|
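Percent-decoded, the added pattern covers underscore-separated names with an NxNN season/episode marker; reconstructed:

re.compile(r'(?P<series_name>.*?)_(?P<season>\d+)[xX](?P<ep>\d+)_.*\.(?P<ext>.*?)$'),  # Xena_4x02_Adventures In The Sin Trade - Part 2.avi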
f808b67c9a067d9addd75f09e10853c3812d6101
|
Refactor code
|
transfers/examples/pre-transfer/00_unbag.py
|
transfers/examples/pre-transfer/00_unbag.py
|
#!/usr/bin/env python
# Script to re-package unzipped bags as standard transfers, utilizing checksums from bag manifest.
# Assumes bags are structured as either bag/data/(content) or bag/data/objects/(content).
# Enables use of scripts to add metadata to SIP without failing transfer at bag validation.
from __future__ import print_function, unicode_literals
import os
import shutil
import sys
def main(transfer_path):
transfer_path = os.path.abspath(transfer_path)
# check if transfer is an unzipped bag
if not os.path.isfile(os.path.join(transfer_path, 'bag-info.txt')):
return 1
# move files in data up one level if 'objects' folder already exists
data_path = os.path.join(transfer_path, 'data')
if os.path.isdir(os.path.join(data_path, 'objects')):
data_contents = os.listdir(data_path)
data_contents = [os.path.abspath(data_path) + '/' + filename for filename in data_contents]
for f in data_contents:
shutil.move(f, transfer_path)
# otherwise, rename data to objects
else:
os.rename(data_path, os.path.join(transfer_path, 'objects'))
# create metadata and subdoc folders if they don't already exist
metadata_dir = os.path.join(transfer_path, 'metadata')
subdoc_dir = os.path.join(metadata_dir, 'submissionDocumentation')
if not os.path.isdir(metadata_dir):
os.mkdir(metadata_dir)
if not os.path.isdir(subdoc_dir):
os.mkdir(subdoc_dir)
# write manifest checksums to checksum file
with open(os.path.join(transfer_path, 'manifest-md5.txt'), 'r') as old_file:
with open(os.path.join(metadata_dir, 'checksum.md5'), 'w') as new_file:
manifest_content = old_file.readlines()
for line in manifest_content:
if "data/objects/" in line:
new_line = line.replace("data/objects/", "../objects/")
else:
new_line = line.replace("data/", "../objects/")
new_file.write(new_line)
# move bag files to submissionDocumentation
for bagfile in 'bag-info.txt', 'bagit.txt', 'manifest-md5.txt', 'tagmanifest-md5.txt':
shutil.copy2(os.path.join(transfer_path, bagfile), os.path.join(subdoc_dir, bagfile))
os.remove(os.path.join(transfer_path, bagfile))
return 0
if __name__ == '__main__':
transfer_path = sys.argv[1]
main(transfer_path)
|
Python
| 0.000002
|
@@ -863,23 +863,20 @@
os.path.
-abspath
+join
(data_pa
@@ -881,17 +881,9 @@
path
-) + '/' +
+,
fil
@@ -887,16 +887,17 @@
filename
+)
for fil
@@ -1677,92 +1677,28 @@
-manifest_content = old_file.readlines()%0A for line in manifest_content
+for line in old_file
:%0A
|
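Decoded, the refactor swaps the manual path concatenation for os.path.join and iterates the manifest file object directly instead of materializing readlines() first; the two rewritten spots, reconstructed:

data_contents = [os.path.join(data_path, filename) for filename in data_contents]

with open(os.path.join(transfer_path, 'manifest-md5.txt'), 'r') as old_file:
    with open(os.path.join(metadata_dir, 'checksum.md5'), 'w') as new_file:
        for line in old_file:
            ...  # replacement logic unchanged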
187c6c89ef7cc319b4357cae8ea7f93c24448b77
|
Add discussion forum CSS to forum XBlock render.
|
common/lib/xblock/discussion/discussion_forum/discussion_forum.py
|
common/lib/xblock/discussion/discussion_forum/discussion_forum.py
|
import logging
from xblock.core import XBlock
from xblock.fields import Scope, String, UNIQUE_ID
from xblock.fragment import Fragment
from .utils import (
render_template,
render_mako_template,
render_mustache_templates,
asset_to_static_url,
add_resources_to_fragment)
log = logging.getLogger(__name__)
@XBlock.needs('discussion')
class DiscussionXBlock(XBlock):
""" Provides functionality similar to discussion XModule in inline mode """
FIELDS_TO_INIT = ('discussion_id',)
discussion_id = String(scope=Scope.settings, default=UNIQUE_ID)
display_name = String(
display_name="Display Name",
help="Display name for this module",
default="Discussion",
scope=Scope.settings
)
data = String(
help="XML data for the problem",
scope=Scope.content,
default="<discussion></discussion>"
)
discussion_category = String(
display_name="Category",
default="Week 1",
help="A category name for the discussion. This name appears in the left pane of the discussion forum for the course.",
scope=Scope.settings
)
discussion_target = String(
display_name="Subcategory",
default="Topic-Level Student-Visible Label",
help="A subcategory name for the discussion. This name appears in the left pane of the discussion forum for the course.",
scope=Scope.settings
)
sort_key = String(scope=Scope.settings)
@property
def course_id(self):
"""
:return: int course id
"""
return unicode(self.location.course_key)
def student_view(self, context=None): # pylint: disable=unused-argument
""" Renders student view for LMS and Studio """
# pylint: disable=no-member
if hasattr(self, 'xmodule_runtime') and getattr(self.xmodule_runtime, 'is_author_mode', False):
fragment = self._student_view_studio()
else:
fragment = self._student_view_lms()
return fragment
def _student_view_lms(self):
""" Renders student view for LMS """
fragment = Fragment()
discussion_service = self.xmodule_runtime.service(self, 'discussion') # pylint: disable=no-member
context = discussion_service.get_inline_template_context()
context['discussion_id'] = self.discussion_id
fragment.add_content(render_mako_template('discussion/_discussion_inline.html', context))
add_resources_to_fragment(fragment)
fragment.add_javascript(render_template('static/js/discussion_inline.js', {'course_id': self.course_id}))
fragment.add_content(render_mustache_templates())
fragment.initialize_js('DiscussionInlineBlock')
return fragment
def _student_view_studio(self):
""" Renders student view for Studio """
fragment = Fragment()
fragment.add_content(render_mako_template(
'discussion/_discussion_inline_studio.html',
{'discussion_id': self.discussion_id}
))
fragment.add_css_url(asset_to_static_url('xblock/discussion/css/discussion-studio.css'))
return fragment
@XBlock.json_handler
def studio_submit(self, data, suffix=''): # pylint: disable=unused-argument
""" Handles Studio submit event """
log.info("submitted: {}".format(data))
self.display_name = data.get("display_name", "Untitled Discussion Topic")
self.discussion_category = data.get("discussion_category", None)
self.discussion_target = data.get("discussion_target", None)
return {
"display_name": self.display_name,
"discussion_category": self.discussion_category,
"discussion_target": self.discussion_target
}
def studio_view(self, context=None): # pylint: disable=unused-argument
""" Renders author view for Studio """
fragment = Fragment()
context = {
"display_name": self.display_name,
"discussion_category": self.discussion_category,
"discussion_target": self.discussion_target
}
log.info("rendering template in context: {}".format(context))
fragment.add_content(render_mako_template('discussion/discussion_inline_edit.html', context))
fragment.add_javascript_url(asset_to_static_url('xblock/discussion/js/discussion_inline_edit.js'))
fragment.initialize_js('DiscussionEditBlock')
return fragment
@staticmethod
def workbench_scenarios():
"""A canned scenario for display in the workbench."""
return [
("Discussion XBlock",
"""<vertical_demo>
<discussion-forum/>
</vertical_demo>
"""),
]
@XBlock.needs('discussion')
class DiscussionCourseXBlock(XBlock):
""" Provides functionality similar to discussion XModule in tab mode """
display_name = String(
display_name="Display Name",
help="Display name for this module",
default="Discussion Course",
scope=Scope.settings
)
def student_view(self, context=None): # pylint: disable=unused-argument
""" Renders student view for LMS and Studio """
# pylint: disable=no-member
if hasattr(self, 'xmodule_runtime') and getattr(self.xmodule_runtime, 'is_author_mode', False):
fragment = self._student_view_studio()
else:
fragment = self._student_view_lms()
return fragment
def _student_view_lms(self):
""" Renders student view for LMS """
fragment = Fragment()
fragment.add_css_url(asset_to_static_url('xblock/discussion/css/discussion-course-custom.css'))
discussion_service = self.xmodule_runtime.service(self, 'discussion') # pylint: disable=no-member
context = discussion_service.get_course_template_context()
context['enable_new_post_btn'] = True
add_resources_to_fragment(fragment)
fragment.add_content(render_mako_template('discussion/_discussion_course.html', context))
fragment.add_javascript(render_template('static/js/discussion_course.js', {
'course_id': self.course_id
}))
fragment.add_content(render_mustache_templates())
fragment.initialize_js('DiscussionCourseBlock')
return fragment
def _student_view_studio(self):
""" Renders student view for Studio """
fragment = Fragment()
fragment.add_content(render_mako_template('discussion/_discussion_course_studio.html'))
fragment.add_css_url(asset_to_static_url('xblock/discussion/css/discussion-studio.css'))
return fragment
def studio_view(self, context=None): # pylint: disable=unused-argument
""" Renders author view Studio """
return Fragment()
|
Python
| 0
|
@@ -2126,32 +2126,110 @@
nt = Fragment()%0A
+ fragment.add_css_url(asset_to_static_url('css/discussion-forum.css'))%0A
discussi
@@ -5695,32 +5695,110 @@
nt = Fragment()%0A
+ fragment.add_css_url(asset_to_static_url('css/discussion-forum.css'))%0A
fragment
|
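Both hunks insert the same stylesheet registration immediately after the Fragment() construction in each _student_view_lms; reconstructed:

fragment = Fragment()
fragment.add_css_url(asset_to_static_url('css/discussion-forum.css'))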
e6756fbe74256a862f2457160b8c71e57ae070cb
|
Use `is` operator for comparing with `None` (PEP 8)
|
skyfield/tests/test_keplerian.py
|
skyfield/tests/test_keplerian.py
|
"""Compare the output of Skyfield with the routines from NOVAS for keplerian orbiting bodies"""
import skyfield.keplerianlib
from skyfield.keplerianlib import KeplerianOrbit, ICRCoordinates
from ..timelib import JulianDate, julian_date
DISTANCE_EPSILON = 0.026
def test_semimajorAxisToOrbitalPeriod():
assert skyfield.keplerianlib.semimajorAxisToOrbitalPeriod(1) == 1
assert skyfield.keplerianlib.semimajorAxisToOrbitalPeriod(1.523679) == 1.8807896358663763
assert skyfield.keplerianlib.semimajorAxisToOrbitalPeriod(4.27371348392) == 8.835031547398543
def test_orbitalPeriodToSemimajorAxis():
assert skyfield.keplerianlib.orbitalPeriodToSemimajorAxis(1) == 1
assert skyfield.keplerianlib.orbitalPeriodToSemimajorAxis(1.8807896358663763) == 1.523679
assert skyfield.keplerianlib.orbitalPeriodToSemimajorAxis(8.835031547398543) == 4.27371348392
def test_convergeEccentricAnomaly():
test = skyfield.keplerianlib.convergeEccentricAnomaly(
hoyle_8077['mean_anomaly'],
hoyle_8077['eccentricity'],
15
)
assert test == hoyle_8077['eccentric_anomaly']
def test_instantiate_8077_hoyle():
hoyle = KeplerianOrbit( hoyle_8077['semimajor_axis'],
hoyle_8077['eccentricity'],
hoyle_8077['inclination'],
hoyle_8077['longitude_ascending'],
hoyle_8077['argument_perihelion'],
hoyle_8077['mean_anomaly'],
hoyle_8077['epoch'])
assert hoyle != None
def test_instantiate_coordinates():
coords = ICRCoordinates(x=500.25, y=10.76, z=0.1125)
assert coords != None
def test_coordinatesEquivalence():
coords_the_first = ICRCoordinates(x=500.25, y=10.76, z=0.1125)
coords_the_second = ICRCoordinates(x=500.25, y=10.76, z=0.1125)
assert coords_the_first.equalTo(coords_the_second)
"""
data gotten from Horizons
date: 2456517.500000000 = A.D. 2013-Aug-13 00:00:00.0000 (CT)
expected coords (AU) 2.421251132790093E+00 -1.918893156489506E+00 -9.813409585464707E-02
Horizon Params
Ephemeris Type [change] : VECTORS
Target Body [change] : Asteroid 8077 Hoyle (1986 AW2)
Coordinate Origin [change] : Solar System Barycenter (SSB) [500@0]
Time Span [change] : Start=2013-08-13, Stop=2013-09-12, Step=1 d
Table Settings [change] : defaults
Display/Output [change] : default (formatted HTML)
EPOCH= 2453995.5 ! 2006-Sep-17.00 (CT) Residual RMS= .43359
EC= .2110946491840378 QR= 2.077692130214496 TP= 2454360.1855338747
OM= 135.855972529608 W= 34.4378477722205 IN= 17.25814783060462
A= 2.633639292806857 MA= 275.9015153135912 ADIST= 3.189586455399217
PER= 4.27408 N= .230605479 ANGMOM= .027287332
DAN= 2.14316 DDN= 3.04671 L= 169.07331
B= 9.658454799999999 MOID= 1.10581994 TP= 2007-Sep-16.6855338747
"""
hoyle_8077 = {
'semimajor_axis' : 2.633278254269645,
'eccentricity' : .2109947010748546,
'inclination' : 17.25945395594321,
'longitude_ascending' : 135.8512354853258,
'argument_perihelion' : 34.46503170092878,
'mean_anomaly' : 330.9918926661418,
'eccentric_anomaly' : 4.0942988262501965,
'epoch' : JulianDate(tt=(2007, 5, 14)),
}
def test_get_8077_hoyle_ecliptic_on_dev_sprint_day_2():
hoyle = KeplerianOrbit( hoyle_8077['semimajor_axis'],
hoyle_8077['eccentricity'],
hoyle_8077['inclination'],
hoyle_8077['longitude_ascending'],
hoyle_8077['argument_perihelion'],
hoyle_8077['mean_anomaly'],
hoyle_8077['epoch'])
date = JulianDate(tt=(2013, 8, 13))
# print date.tt
test = hoyle.getECLCoordinatesOnJulianDate(date)
#print test
epsilon = 2e-2
assert abs(test.x - 2.421251271197979) < epsilon
assert abs(test.y - -1.918893007049262) < epsilon
assert abs(test.z - -0.09813403009731327) < epsilon
|
Python
| 0.000287
|
@@ -1556,18 +1556,22 @@
t hoyle
-!=
+is not
None%0A%0Ad
@@ -1685,10 +1685,14 @@
rds
-!=
+is not
Non
|
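Decoded, the two hunks replace equality comparison against None with identity checks, as PEP 8 recommends:

assert hoyle is not None
assert coords is not None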
2fbd5ceead47ea980e5dfa7b2bc29eafbbab2d72
|
remove unneeded import in views
|
blog/views.py
|
blog/views.py
|
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.shortcuts import render, get_object_or_404, get_list_or_404
from django.utils import timezone
from . import models as blog
def home(request):
NUM_LAST_ARTICLES = 5
articles = blog.Article.objects.filter(date__lte=timezone.now()).order_by('-date')[:NUM_LAST_ARTICLES]
return render(request, 'blog/article.html', {'isroot': True, 'articles': articles})
def article(request, slug=None):
if slug is None:
articles = get_list_or_404(blog.Article)
else:
articles = get_list_or_404(blog.Article, slug=slug)
return render(request, 'blog/article.html', {
'isroot': bool(slug is None),
'articles': articles
})
def category(request, slug=None):
if slug is None:
categories = get_list_or_404(blog.Category)
else:
categories = get_list_or_404(blog.Category, slug=slug)
return render(request, 'blog/category.html', {
'isroot': bool(slug is None),
'categories': categories,
})
|
Python
| 0
|
@@ -1,58 +1,4 @@
-from django.core.exceptions import ObjectDoesNotExist%0A
from
|
0b059cb8368f2b6ce5782f3d6fc87bc1479d96d8
|
remove a print
|
boink/cdbg.py
|
boink/cdbg.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Camille Scott, 2019
# File : cdbg.py
# License: MIT
# Author : Camille Scott <camille.scott.w@gmail.com>
# Date : 14.10.2019
from boink import libboink
from boink.cli import CommandRunner, get_output_interval_args
from boink.dbg import get_graph_args, process_graph_args
from boink.parsing import get_pairing_args, iter_fastx_inputs
from boink.serialization import cDBGSerialization
from boink.metadata import CUR_TIME
import os
import sys
def get_cdbg_args(parser):
default_prefix = 'boink.build-cdbg.' + CUR_TIME
parser.default_prefix = default_prefix
group = parser.add_argument_group('cDBG')
group.add_argument('--results-dir',
default=default_prefix)
group.add_argument('--normalize',
type=int,
nargs='?',
const=10)
group.add_argument('--save-cdbg',
metavar='PREFIX.<format>',
nargs='?',
const='boink.cdbg.graph')
group.add_argument('--save-cdbg-format',
nargs='+',
choices=cDBGSerialization.FORMATS,
default=['gfa1'])
group.add_argument('--track-cdbg-stats',
metavar='FILE_NAME.csv',
nargs='?',
const='boink.cdbg.stats.csv')
group.add_argument('--track-cdbg-history',
metavar='FILENAME.graphml',
nargs='?',
const='boink.cdbg.history.graphml')
group.add_argument('--track-cdbg-components',
metavar='FILE_NAME.csv',
nargs='?',
const='boink.cdbg.components.csv')
group.add_argument('--component-sample-size',
type=int,
default=10000)
group.add_argument('--track-cdbg-unitig-bp',
metavar='FILENAME.csv',
nargs='?',
const='boink.cdbg.unitigs.bp.csv')
group.add_argument('--unitig-bp-bins',
nargs='+',
type=int)
group.add_argument('--validate',
metavar='FILENAME.csv',
nargs='?',
const='boink.cdbg.validation.csv')
return group
def process_cdbg_args(args):
def join(p):
return p if p is None else os.path.join(args.results_dir, p)
args.track_cdbg_stats = join(args.track_cdbg_stats)
args.track_cdbg_components = join(args.track_cdbg_components)
args.track_cdbg_history = join(args.track_cdbg_history)
args.save_cdbg = join(args.save_cdbg)
args.track_cdbg_unitig_bp = join(args.track_cdbg_unitig_bp)
def print_cdbg_args(args):
print('* cDBG Params', file=sys.stderr)
print('* Directory: ', args.results_dir, file=sys.stderr)
if args.save_cdbg:
print('* Saving cDBG every {0} sequences with file prefix {1}'.format(args.coarse_interval,
args.save_cdbg),
file=sys.stderr)
print('* cDBG save formats: {0}'.format(', '.join(args.save_cdbg_format)))
if args.track_cdbg_stats:
print('* Tracking cDBG stats and reporting every {0} sequences'.format(args.fine_interval),
file=sys.stderr)
print('* Saving tracking information to', args.track_cdbg_stats, file=sys.stderr)
if args.track_cdbg_history:
print('* Tracking cDBG history and saving to', args.track_cdbg_history, file=sys.stderr)
if args.validate:
print('* cDBG will be validated on completion and results saved to', args.validate,
file=sys.stderr)
print('*', '*' * 10, '*', sep='\n', file=sys.stderr)
class cDBGRunner(CommandRunner):
def __init__(self, parser):
get_graph_args(parser)
get_cdbg_args(parser)
get_output_interval_args(parser)
group = get_pairing_args(parser)
group.add_argument('-o', dest='output_filename', default='/dev/stdout')
group.add_argument('-i', dest='inputs', nargs='+', required=True)
super().__init__(parser)
def postprocess_args(self, args):
process_graph_args(args)
process_cdbg_args(args)
def setup(self, args):
print('boink cdbg: start setup.', file=sys.stderr)
os.makedirs(args.results_dir, exist_ok=True)
self.dbg_t = args.graph_t
self.hasher = args.hasher_t(args.ksize)
self.storage = args.storage.build(*args.storage_args)
self.dbg = args.graph_t.build(self.hasher, self.storage)
self.cdbg_t = libboink.cdbg.cDBG[type(self.dbg)]
self.compactor_t = libboink.cdbg.StreamingCompactor[type(self.dbg)]
self.compactor = self.compactor_t.Compactor.build(self.dbg)
print(args)
if args.normalize:
self.processor = self.compactor_t.NormalizingCompactor[libboink.parsing.FastxReader].build(self.compactor,
args.normalize,
args.fine_interval,
args.medium_interval,
args.coarse_interval)
else:
self.processor = self.compactor_t.Processor.build(self.compactor,
args.fine_interval,
args.medium_interval,
args.coarse_interval)
if args.track_cdbg_stats:
self.cdbg_reporter = self.compactor_t.Reporter.build(self.compactor,
args.track_cdbg_stats)
self.processor.register_listener(self.cdbg_reporter)
if args.track_cdbg_unitig_bp:
if args.unitig_bp_bins is None:
bins = [args.ksize, 100, 200, 500, 1000]
else:
bins = args.unitig_bp_bins
self.unitig_reporter = self.cdbg_t.UnitigReporter.build(self.compactor.cdbg.__smartptr__(),
args.track_cdbg_unitig_bp,
bins)
self.processor.register_listener(self.unitig_reporter)
if args.track_cdbg_components:
self.components = self.cdbg_t.ComponentReporter.build(self.compactor.cdbg.__smartptr__(),
args.track_cdbg_components,
args.component_sample_size)
self.processor.register_listener(self.components)
self.writers = []
if args.save_cdbg:
for cdbg_format in args.save_cdbg_format:
writer = self.cdbg_t.Writer.build(self.compactor.cdbg.__smartptr__(),
cDBGSerialization.enum_from_str(cdbg_format),
args.save_cdbg + '.' + cdbg_format)
self.processor.register_listener(writer)
self.writers.append(writer)
print('boink cdbg: finish setup.', file=sys.stderr)
def execute(self, args):
print('boink cdbg: start execution.', file=sys.stderr)
for sample, prefix in iter_fastx_inputs(args.inputs, args.pairing_mode):
for n_reads, state in self.processor.chunked_process(*sample):
print('...processed {0} sequences.'.format(n_reads))
print('\t', self.processor.n_invalid(), 'invalid,', self.processor.n_too_short(), 'too short.')
def teardown(self):
pass
|
Python
| 0.005113
|
@@ -5013,27 +5013,8 @@
bg)%0A
- print(args)
%0A
|
5b0e1ad40ab25dbe9e3a52e3517974e08a7fac5f
|
Fix wrong method name
|
envsettings/email.py
|
envsettings/email.py
|
from __future__ import absolute_import
import email.utils as email_utils
from .base import URLConfigBase, is_importable
class EmailConfig(URLConfigBase):
CONFIG = {
'smtp': {'EMAIL_BACKEND': 'django.core.mail.backends.smtp.EmailBackend',
'EMAIL_USE_TLS': False},
'smtps': {'EMAIL_BACKEND': 'django.core.mail.backends.smtp.EmailBackend',
'EMAIL_USE_TLS': True},
'file': {'EMAIL_BACKEND': 'django.core.mail.backends.filebased.EmailBackend'},
'mailgun': {'EMAIL_BACKEND': 'django_mailgun.MailgunBackend'},
'sendgrid': {'EMAIL_BACKEND': 'sgbackend.SendGridBackend'},
'mandrill': {'EMAIL_BACKEND': 'djrill.mail.backends.djrill.DjrillBackend'},
'ses': {'EMAIL_BACKEND': 'django_ses_backend.SESBackend'},
'postmark': {'EMAIL_BACKEND': 'postmark.django_backend.EmailBackend'},
}
@staticmethod
def parse_address_list(address_string):
return email_utils.getaddresses([address_string])
def get_email_list(self, keys, default=None):
return super(EmailConfig, self).get(keys, default,
convert=self.parse_email_list)
def handle_smtp(self, parsed_url, config):
if config.get('EMAIL_USE_TLS'):
default_port = 587
elif config.get('EMAIL_USE_SSL'):
default_port = 465
else:
default_port = 25
config.update({
'EMAIL_HOST': parsed_url.hostname or 'localhost',
'EMAIL_PORT': parsed_url.port or default_port,
'EMAIL_HOST_USER': parsed_url.username or '',
'EMAIL_HOST_PASSWORD': parsed_url.password or ''})
return config
def handle_smtps(self, parsed_url, config):
return self.handle_smtp(parsed_url, config)
def handle_file(self, parsed_url, config):
if parsed_url.path == '/dev/stdout':
config['EMAIL_BACKEND'] = 'django.core.mail.backends.console.EmailBackend'
elif parsed_url.path == '/dev/null':
config['EMAIL_BACKEND'] = 'django.core.mail.backends.dummy.EmailBackend'
else:
config['EMAIL_FILE_PATH'] = parsed_url.path
return config
def handle_mailgun(self, parsed_url, config):
config['MAILGUN_ACCESS_KEY'] = parsed_url.password
config['MAILGUN_SERVER_NAME'] = parsed_url.hostname
return config
def auto_config_mailgun(self, environ):
try:
api_key, login, password, server, port = [
environ['MAILGUN_' + key] for key in (
'API_KEY', 'SMTP_LOGIN', 'SMTP_PASSWORD',
'SMTP_SERVER', 'SMTP_PORT')]
except KeyError:
return
if is_importable(self.CONFIG['mailgun']['EMAIL_BACKEND']):
domain = login.split('@')[-1]
return 'mailgun://api:{api_key}@{domain}'.format(
api_key=api_key, domain=domain)
else:
return 'smtps://{login}:{password}@{server}:{port}'.format(
login=login, password=password, server=server, port=port)
def handle_sendgrid(self, parsed_url, config):
config['SENDGRID_USER'] = parsed_url.username
config['SENDGRID_PASSWORD'] = parsed_url.password
return config
def auto_config_sendgrid(self, environ):
try:
user, password = environ['SENDGRID_USERNAME'], environ['SENDGRID_PASSWORD']
except KeyError:
return
if is_importable(self.CONFIG['sendgrid']['EMAIL_BACKEND']):
return 'sendgrid://{user}:{password}@sendgrid.com'.format(
user=user, password=password)
else:
return 'smtps://{user}:{password}@smtp.sendgrid.net:587'.format(
user=user, password=password)
def handle_mandrill(self, parsed_url, config):
config['MANDRILL_API_KEY'] = parsed_url.password
if parsed_url.username:
config['MANDRILL_SUBACCOUNT'] = parsed_url.username
return config
def auto_config_mandrill(self, environ):
try:
user, api_key = environ['MANDRILL_USERNAME'], environ['MANDRILL_APIKEY']
except KeyError:
return
if is_importable(self.CONFIG['mandrill']['EMAIL_BACKEND']):
return 'mandrill://:{api_key}@mandrillapp.com'.format(
api_key=api_key)
else:
return 'smtps://{user}:{api_key}@smtp.mandrillapp.com:587'.format(
user=user, api_key=api_key)
def handle_ses(self, parsed_url, config):
if parsed_url.username:
config['AWS_SES_ACCESS_KEY_ID'] = parsed_url.username
if parsed_url.password:
config['AWS_SES_SECRET_ACCESS_KEY'] = parsed_url.password
if parsed_url.hostname:
if '.' in parsed_url.hostname:
config['AWS_SES_REGION_ENDPOINT'] = parsed_url.hostname
else:
config['AWS_SES_REGION_NAME'] = parsed_url.hostname
return config
def handle_postmark(self, parsed_url, config):
config['POSTMARK_API_KEY'] = parsed_url.password
return config
def auto_config_postmark(self, environ):
try:
api_key, server = (environ['POSTMARK_API_KEY'],
environ['POSTMARK_SMTP_SERVER'])
except KeyError:
return
if is_importable(self.CONFIG['postmark']['EMAIL_BACKEND']):
return 'postmark://user:{api_key}@postmarkapp.com'.format(
api_key=api_key)
else:
return 'smtps://{api_key}:{api_key}@{server}:25'.format(
api_key=api_key, server=server)
|
Python
| 0.999997
|
@@ -999,21 +999,23 @@
def get_
-email
+address
_list(se
@@ -1133,21 +1133,23 @@
f.parse_
-email
+address
_list)%0A%0A
|
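Decoded, the hunks rename the accessor and its converter so they match the parse_address_list static method defined earlier in the class; the corrected method:

def get_address_list(self, keys, default=None):
    return super(EmailConfig, self).get(keys, default,
                                        convert=self.parse_address_list)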
62c76a953ea5a1c753f9c7447bab5800bb25c2b1
|
add life expectancy context bulk download for ihme
|
ddf_utils/factory/igme.py
|
ddf_utils/factory/igme.py
|
# -*- coding: utf-8 -*-
"""download sources from CME info portal"""
__doc__ = """T.B.D"""
import os.path as osp
import re
import requests
import pandas as pd
from io import BytesIO
from lxml import html
from urllib.parse import urlsplit, urljoin
url = 'http://www.childmortality.org/'
metadata = None
def load_metadata():
r = requests.get(url)
h = html.fromstring(r.content)
flist = []
for l in h.xpath('//a/@href'):
if l.endswith('xlsx'):
#print(urljoin(url, l))
flist.append(urljoin(url, l))
md = pd.DataFrame(flist, columns=['link'])
md['name'] = md['link'].map(lambda x: osp.basename(x)[:-5])
global metadata
metadata = md[['name', 'link']].copy()
def has_newer_source(v):
"""accepts a int and return true if version inferred from metadata is bigger."""
if metadata is None:
load_metadata()
link = metadata.loc[0, 'link']
ver = re.match('.*files_v(\d+).*', link).groups()[0]
if int(ver) > v:
return True
return False
def bulk_download(out_dir, name=None):
if metadata is None:
load_metadata()
if name:
names = [name]
else:
names = metadata['name'].values
for n in names:
if n not in metadata['name'].values:
raise KeyError("{} not found in page.".format(n))
link = metadata.loc[metadata['name'] == n, 'link'].values[0]
res = requests.get(link)
out_path = osp.join(out_dir, osp.basename(link))
with open(osp.expanduser(out_path), 'wb') as f:
b = BytesIO(res.content)
f.write(b.read())
f.close()
|
Python
| 0
|
@@ -160,31 +160,8 @@
pd%0A%0A
-from io import BytesIO%0A
from
@@ -1549,19 +1549,15 @@
-b = BytesIO
+f.write
(res
@@ -1570,38 +1570,8 @@
nt)%0A
- f.write(b.read())%0A
|
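Decoded, the change drops the BytesIO import and round-trip and writes the response body directly; the resulting tail of bulk_download (the redundant f.close() inside the with block is left in place by the delta):

with open(osp.expanduser(out_path), 'wb') as f:
    f.write(res.content)
    f.close()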
5dce1ee6c54d8686cee42651528c087e9939368b
|
Bump version, 0.9.4.21
|
dp_tornado/version.py
|
dp_tornado/version.py
|
__version_info__ = (0, 9, 4, 21)
__version__ = '.'.join(map(str, __version_info__))
|
Python
| 0
|
@@ -27,9 +27,9 @@
4, 2
-1
+2
)%0A__
|
f35194ee63f4f1a4735e1e88c84264162914f415
|
stop using dataset name
|
FaostatExtractor/es/weso/faostat/translator/model_object_builder.py
|
FaostatExtractor/es/weso/faostat/translator/model_object_builder.py
|
# coding=utf-8
'''
Created on 02/02/2014
@author: Dani
'''
from lpentities.observation import Observation
from lpentities.country import Country
from lpentities.indicator import Indicator
from lpentities.license import License
from lpentities.measurement_unit import MeasurementUnit
from lpentities.computation import Computation
from lpentities.instant import Instant
from lpentities.data_source import DataSource
from lpentities.organization import Organization
from lpentities.dataset import Dataset
from lpentities.value import Value
from es.weso.faostat.translator.translator_const import TranslatorConst
from datetime import datetime
from lpentities.year_interval import YearInterval
class ModelObjectBuilder(object):
'''
classdocs
'''
def __init__(self, registers, config, log):
self.log = log
self.config = config
self.registers = registers
self.country_list = []
self.dataset = self.build_dataset()
self.default_computation = Computation(Computation.RAW)
'''
Constructor
'''
def build_dataset(self):
#Creating dataset object
dataset = Dataset(dataset_id="resources_land_e", name="Faostat land usage data", frequency="")
#creating related objects
#Organization
org = Organization(name="FAO: Food and Agriculture Organization of the United Nations", url="http://www.fao.org/")
#datasource
datasource = DataSource(source_id="faostat", name="Faostat. Statistics division of the FAO")
#license
license_type = License(description="Attribution and need permission for commercial use",
name="CopyrightFao",
republish=True,
url="http://www.fao.org/contact-us/terms/en/")
#linking objects
org.add_data_source(datasource)
datasource.add_dataset(dataset)
dataset.license_type = license_type
#Returning result
return dataset
def run(self):
for register in self.registers:
self.build_model_objects_from_register(register)
return self.dataset
def build_model_objects_from_register(self, register):
country = self.get_asociated_country(register[TranslatorConst.COUNTRY]) # Done. BUT REVIEW HOW WE GET COUNTRIES
new_observation = Observation(observation_id="abba")
self.add_indicator_to_observation(new_observation, register) # DONE
self.add_value_to_observation(new_observation, register) # DONE
self.add_computation_to_observation(new_observation) # DONE
self.add_reftime_to_observation(new_observation, register) # DONE
self.add_issued_to_observation(new_observation, register) # DONE
country.add_observation(new_observation)
self.dataset.add_observation(new_observation)
def add_issued_to_observation(self, observation, register):
#Adding time in which the observation has been treated by us
observation.issued = Instant(datetime.now())
def add_reftime_to_observation(self, observation, register):
observation.ref_time = YearInterval(year=register[TranslatorConst.YEAR])
def add_computation_to_observation(self, observation):
observation.computation = self.default_computation
def add_value_to_observation(self, observation, register):
value = Value()
value.value_type = "float"
if register[TranslatorConst.VALUE] is None or register[TranslatorConst.VALUE] == "":
value.obs_status = Value.MISSING
else:
value.obs_status = Value.AVAILABLE
value.value = register[TranslatorConst.VALUE]
observation.value = value
def add_measurement_unit_to_indicator(self, indicator, register):
indicator.measurement_unit = MeasurementUnit(register[TranslatorConst.UNIT])
def add_indicator_to_observation(self, observation, register):
#name, description, id
indicator = Indicator(name=register[TranslatorConst.ITEM],
description=self.get_indicator_description(register[TranslatorConst.ITEM_CODE]),
indicator_id=self.get_indicator_id(register))
self.add_measurement_unit_to_indicator(indicator, register)
observation.indicator = indicator
def get_indicator_id(self, register):
return "FAOSTAT_" + str(register[TranslatorConst.ITEM_CODE])
def get_indicator_description(self, indicator_code):
if indicator_code == 6601:
return "Land Area. Total area in sq. km of the referred region"
elif indicator_code == 6610:
return "Agricultural land. Total area in sq. km. for agriculture of the referred region"
elif indicator_code == 6661:
return "Forest land. Total forest surface in sq. km of the referred region"
elif indicator_code == 6621:
return "Arable area. Total arable surface in sq. km. of the referred region"
else:
raise RuntimeError("Unknown indicator. No description found")
def get_asociated_country(self, country_code):
'''
Strategy: we are adding the field "country_code" to the entity class Country.
First we check whether our country list already contains an object whose
country_code field matches the received code. If not, we build a new
country object and add it to self.country_list.
There are already code attributes in the Country class, but not the faostat
one that we are using, so we attach a new attribute to the standard Country
objects to work with only in this method.
'''
country_found = None
for country in self.country_list:
if country.country_code == country_code:
country_found = country
break
if country_found is not None:
return country_found
else:
new_country = self.TEMPORAL_get_country(country_code)
new_country.country_code = country_code
self.country_list.append(new_country)
return new_country
def TEMPORAL_get_country(self, country_name):
no_whites_name = country_name.replace(" ", "")
return Country(name=country_name, iso3=no_whites_name, iso2=no_whites_name)
|
Python
| 0.000007
|
@@ -1200,40 +1200,8 @@
_e%22,
- name=%22Faostat land usage data%22,
fre
|
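The single hunk removes the name keyword from the Dataset constructor call; reconstructed:

dataset = Dataset(dataset_id="resources_land_e", frequency="")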
7eca9eb4d5c7134b84c3462ac01cf1679557819f
|
Update example
|
example/app/tables.py
|
example/app/tables.py
|
#!/usr/bin/env python
# coding: utf-8
from table.columns import Column, LinkColumn, DatetimeColumn, Link
from table.utils import A
from table import Table
from models import Person
class PersonTable(Table):
id = Column(field='id', header=u'#', header_attrs={'width': '5%'})
name = Column(field='name', header=u'姓名')
action = LinkColumn(header=u'操作', links=[Link(text=u'编辑', viewname='app.views.edit', args=(A('id'),))])
class Meta:
model = Person
ext_button_link = "http://www.baidu.com"
ext_button_text = "Add +"
|
Python
| 0.000001
|
@@ -330,10 +330,12 @@
r=u'
-%E5%A7%93%E5%90%8D
+NAME
') %0D
@@ -372,10 +372,14 @@
r=u'
-%E6%93%8D%E4%BD%9C
+ACTION
', l
@@ -400,10 +400,12 @@
t=u'
-%E7%BC%96%E8%BE%91
+EDIT
', v
@@ -516,70 +516,169 @@
ton_
-link = %22http://www.baidu.com%22%0D%0A ext_button_text = %22Add +%22
+template = %22button.html%22%0D%0A # disable_search = True%0D%0A # disable_info = True%0D%0A # disable_length_menu = True%0D%0A # disable_pagination = True
%0D%0A
|
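Percent-decoded, the hunks translate the header and link labels to English (NAME, ACTION, EDIT) and replace the external button link/text pair with a template reference, leaving the feature switches commented out; the resulting Meta block, reconstructed:

class Meta:
    model = Person
    ext_button_template = "button.html"
    # disable_search = True
    # disable_info = True
    # disable_length_menu = True
    # disable_pagination = True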
3a156fb107db25c8171adcc1346fd17f36222092
|
Fix refresh count
|
galaxy/main/management/commands/refresh_role_counts.py
|
galaxy/main/management/commands/refresh_role_counts.py
|
import time
from math import ceil, floor
from github import Github
from django.conf import settings
from django.db.models import Max
from django.core.management.base import BaseCommand, CommandError
from galaxy.main.models import Role, RefreshRoleCount
from galaxy.main.celerytasks.tasks import refresh_role_counts
class Command(BaseCommand):
help = 'Update each role with GitHub counts'
def handle(self, *args, **options):
agg = Role.objects.filter(is_valid=True,active=True).aggregate(Max('id'))
max_id = agg['id__max']
size = ceil(max_id / float(len(settings.GITHUB_TASK_USERS)))
in_list = []
print 'Refresh Role Counts'
# for i in range(len(settings.GITHUB_TASK_USERS)):
i = 1
start = size * i
end = size * (i + 1)
print 'User: %s' % settings.GITHUB_TASK_USERS[i]['username']
print 'Range: %d - %d' % (start, end)
r = RefreshRoleCount.objects.create(
state='PENDING',
description='User: %s Range: %s-%s' % (settings.GITHUB_TASK_USERS[i]['username'], start, end)
)
in_list.append(r.id)
gh_api = Github(settings.GITHUB_TASK_USERS[i]['username'],settings.GITHUB_TASK_USERS[i]['password'])
refresh_role_counts.delay(start, end, gh_api, r)
print "Request submitted to Celery."
finished = False
started = time.time()
while not finished:
finished = True
for obj in RefreshRoleCount.objects.filter(pk__in=in_list,state__not='COMPLETED'):
if not obj.state == 'FINISHED':
finished = False
else:
print '%s Total: %s Passed: %s Failed: %s' % (obj.description, obj.failed + obj.passed, obj.passed, obj.failed)
obj.state = 'COMPLETED'
obj.save()
time.sleep(60)
elapsed = time.time() - started
hours = floor(elapsed / 3600) if elapsed > 3600 else 0
minutes = floor((elapsed - (hours * 3600)) / 60) if (elapsed - (hours * 3600)) > 60 else 0
seconds = elapsed - (hours * 3600) - (minutes * 60)
print 'Elapsed time %02d.%02d.%02d' % (hours, minutes, seconds)
|
Python
| 0.000001
|
@@ -128,17 +128,20 @@
port Max
+, Q
%0A
-
from dja
@@ -689,10 +689,8 @@
- #
for
@@ -743,18 +743,8 @@
- i = 1%0A
@@ -772,16 +772,20 @@
+
+
end = si
@@ -797,16 +797,20 @@
(i + 1)%0A
+
@@ -870,32 +870,36 @@
rname'%5D%0A
+
print 'Range: %25d
@@ -928,16 +928,20 @@
+
+
r = Refr
@@ -981,16 +981,20 @@
+
state='P
@@ -1002,16 +1002,20 @@
NDING',%0A
+
@@ -1120,18 +1120,26 @@
+
+
)%0A
+
@@ -1159,16 +1159,20 @@
d(r.id)%0A
+
@@ -1272,16 +1272,20 @@
word'%5D)%0A
+
@@ -1564,18 +1564,16 @@
ist,
+~Q(
state
-__not
='CO
@@ -1581,16 +1581,17 @@
PLETED')
+)
:%0A
|
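The fix here is worth spelling out: Django has no `__not` field lookup (it raises a FieldError), so the delta imports `Q` and negates with `~Q(state='COMPLETED')` instead; it also restores the `for i in range(len(settings.GITHUB_TASK_USERS)):` loop that had been short-circuited with `i = 1`. Note that, as decoded, the positional `~Q(...)` lands after the keyword argument, which Python rejects as a syntax error; a working form puts the Q object first:

    from django.db.models import Max, Q

    # Negate a condition with a Q object; positional Q arguments
    # must precede keyword filters in the call.
    pending = RefreshRoleCount.objects.filter(~Q(state='COMPLETED'), pk__in=in_list)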
aa349112b891e76c8fbbd7033586f48426c3fbe7
|
Fix linux SystemProfiler cpu order of operations and update deprecated psutil calls.
|
lib/python/plow/rndaemon/profile/linux.py
|
lib/python/plow/rndaemon/profile/linux.py
|
"""
System profile plugin for Linux.
"""
import os
import re
import logging
import itertools
from functools import partial
import psutil
from .posix import SystemProfiler as PosixSystemProfiler
logger = logging.getLogger(__name__)
__all__ = ["SystemProfiler"]
class SystemProfiler(PosixSystemProfiler):
def __init__(self):
super(SystemProfiler, self).__init__()
self.data['bootTime'] = int(psutil.BOOT_TIME)
self.cpuprofile = None
self._init_cpu_info()
def __repr__(self):
return "<%s: Linux>" % self.__class__.__name__
def _init_cpu_info(self):
"""Init CPU stats that don't change over time"""
cpuinfo = CpuProfile()
model = ''
if cpuinfo.physical_cpus:
# grab the model of the first processor entry
first_cpu = cpuinfo.physical_cpus.itervalues().next()
model = first_cpu.get('model', '')
self.hyperthread_factor = first_cpu.get('ht_factor', 1)
self.data.update({
'cpuModel': model,
'physicalCpus': cpuinfo.num_phys_cpus,
'logicalCpus': psutil.NUM_CPUS,
})
self.cpuprofile = cpuinfo
def _update(self):
memstats = psutil.virtual_memory()
swapstats = psutil.swap_memory()
b_to_mb = 1024 ** 2
self.data.update({
'freeRamMb': memstats.available / b_to_mb,
'totalRamMb': memstats.total / b_to_mb,
'freeSwapMb': swapstats.free / b_to_mb,
'totalSwapMb': swapstats.total / b_to_mb,
})
def getSubprocessOpts(self, cmd, **kwargs):
"""
getSubprocessOpts(list|str cmd, **kwargs) -> (cmd, dict)
Method for returning the appropriate subprocess.Popen
arguments and kw arguments for a Linux platform.
"""
cmd, opts = super(SystemProfiler, self).getSubprocessOpts(cmd, **kwargs)
cpuprofile = self.cpuprofile
env = opts['env']
# uid = env.get('PLOW_TASK_UID')
# gid = env.get('PLOW_TASK_GID')
cpus = kwargs.get('cpus', set())
opts['preexec_fn'] = partial(self._preexec_fn,
cpus=cpus,
cpu_map=cpuprofile.logical_cpus)
return cmd, opts
@staticmethod
def _preexec_fn(**kwargs):
"""
_preexec_fn(**kwargs) -> void
static method used for a subprocess.Popen call,
to be executed in the process right before calling the command.
Sets the process to the given uid and gid.
Locks hyperthreaded processors to the process tree
"""
uid = kwargs.get("uid")
gid = kwargs.get("gid")
cpus = kwargs.get("cpus")
cpu_map = kwargs.get("cpu_map")
if gid is not None:
os.setgid(int(gid))
if uid is not None:
os.setuid(int(uid))
if cpus is not None and cpu_map is not None:
logical_ids = set()
for slot in cpus:
logical_ids.update(cpu_map.get(slot, []))
if logical_ids:
p = psutil.Process(os.getpid())
p.set_cpu_affinity(logical_ids)
class CpuProfile(object):
"""
CpuProfile
Class that represents the mappings between
physical cpus, cores per cpu, and the logical
processors in the system.
Helps to account for and group hyperthreaded
processors with their actual core id and cpu id.
"""
CPUINFO = '/proc/cpuinfo'
def __init__(self):
self.physical_cpus = {}
self.logical_cpus = {}
self.num_cpus = 0
self.num_phys_cpus = 0
self.update()
def update(self):
FIELDS = set([
'processor', 'physical id', 'siblings',
'cpu cores', 'core id', 'model name'
])
cpus = {}
proc = {}
log_cpu_count = 0
with open(self.CPUINFO) as f:
for line in f:
# if we reach a delimiter, save the current
# proc object before clearing and starting over
if proc and line.strip() == "":
phys_id = proc.get('physical id', -1)
core_id = proc.get('core id', log_cpu_count)
proc_id = proc['processor']
phys_dict = cpus.get(phys_id, {})
phys_dict.setdefault('processors', {})\
.setdefault(core_id, set()).add(proc_id)
if not phys_id in cpus:
sibs = proc.get('siblings', 1)
cores = proc.get('cpu cores', 1)
model = re.sub(r'\s+', ' ', proc['model name'])
ht_factor = (sibs / cores) if (sibs > cores) else 1
phys_dict['siblings'] = sibs
phys_dict['num_cores'] = cores
phys_dict['model'] = model
phys_dict['ht_factor'] = ht_factor
phys_dict['ht_enabled'] = True if ht_factor > 1 else False
cpus[phys_id] = phys_dict
proc = {}
log_cpu_count += 1
continue
try:
k, v = re.split(r'\s+:\s*', line, 1)
except ValueError:
continue
if k in FIELDS:
v = v.strip()
proc[k.strip()] = int(v) if v.isdigit() else v
# For VMs or systems that aren't reporting the real
# physical CPU IDs, fix the "catch-all" category count
# to just represent all processors as physical
phys_dict = cpus.get(-1)
if phys_dict:
total = len(phys_dict['processors'])
phys_dict['siblings'] = total
phys_dict['num_cores'] = total
self.num_cpus = log_cpu_count
self.num_phys_cpus = sum(i['num_cores'] for i in cpus.itervalues())
self.physical_cpus = cpus
self.logical_cpus = dict(enumerate(itertools.chain.from_iterable(
cpu['processors'].itervalues() for cpu in cpus.itervalues()
)))
|
Python
| 0
|
@@ -374,17 +374,16 @@
nit__()%0A
-%0A
@@ -421,81 +421,21 @@
til.
-BOOT_TIME)%0A%0A self.cpuprofile = None%0A%0A self._init_cpu_info()
+boot_time())%0A
%0A%0A
@@ -1073,16 +1073,19 @@
til.
-NUM_CPUS
+cpu_count()
,%0A
@@ -1144,32 +1144,62 @@
_update(self):%0A
+ self._init_cpu_info()%0A
memstats
|
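Two of the three hunks are straight deprecation swaps in psutil's API, and the third moves CPU probing into the update path; a sketch of the changed lines as decoded from the delta:

    class SystemProfiler(PosixSystemProfiler):
        def __init__(self):
            super(SystemProfiler, self).__init__()
            self.data['bootTime'] = int(psutil.boot_time())  # was: psutil.BOOT_TIME

        def _update(self):
            self._init_cpu_info()  # re-probe CPU info each update; uses psutil.cpu_count() (was: NUM_CPUS)
            memstats = psutil.virtual_memory()
            # ... rest unchanged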
cd9e9efd8587b5be9e3d9a4e7efeaf26b048b0d2
|
fix attribute error on handlers loading
|
lib/rapidsms/contrib/handlers/settings.py
|
lib/rapidsms/contrib/handlers/settings.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
INSTALLED_HANDLERS = None
EXCLUDED_HANDLERS = []
|
Python
| 0.000001
|
@@ -94,8 +94,44 @@
RS = %5B%5D%0A
+RAPIDSMS_HANDLERS_EXCLUDE_APPS = %5B%5D
|
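The delta simply appends one more module-level default; the resulting settings module in full:

    #!/usr/bin/env python
    # vim: ai ts=4 sts=4 et sw=4

    INSTALLED_HANDLERS = None
    EXCLUDED_HANDLERS = []
    RAPIDSMS_HANDLERS_EXCLUDE_APPS = []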
5223846786b70dd9c198f98f7a620e70b40fab3d
|
update k84
|
chap09/k84.py
|
chap09/k84.py
|
#
# usage: python k84.py {N}
#
import sys
import plyvel
from math import log
def wc_matrix(n, ofn):
co_db = plyvel.DB('./co.ldb', create_if_missing=True)
word_db = plyvel.DB('./word.ldb', create_if_missing=True)
context_db = plyvel.DB('./context.ldb', create_if_missing=True)
x = 0
ZERO = x.to_bytes((x.bit_length() + 7) // 8, 'big')
for k, v in co_db:
tmp = k.decode('utf-8').strip().split('\t')
if len(tmp) != 2:
continue
x = 0
f_tc = int.from_bytes(v, 'big')
if f_tc >= 10:
f_t = int.from_bytes(word_db.get(tmp[0].encode('utf-8'), ZERO), 'big')
f_c = int.from_bytes(context_db.get(tmp[1].encode('utf-8'), ZERO), 'big')
x = max(log(2, n * f_tc / (f_t * f_c)), 0)
if x != 0:
with open(ofn, 'a') as f:
f.write('{}\t{}\t{}\n'.format(tmp[0], tmp[1], x))
co_db.close()
word_db.close()
context_db.close()
if __name__ == '__main__':
N = int(sys.argv[1])
ofn = 'wc-matrix.txt'
wc_matrix(N, ofn)
|
Python
| 0
|
@@ -50,16 +50,30 @@
plyvel%0A
+import struct%0A
from mat
@@ -90,18 +90,22 @@
og%0A%0Adef
-w
c
+reate
_matrix(
@@ -105,21 +105,16 @@
matrix(n
-, ofn
):%0A c
@@ -300,73 +300,72 @@
ue)%0A
-%0A
-x = 0%0A ZERO = x.to_bytes((x.bit_length() + 7) // 8, 'big'
+matrix_db = plyvel.DB('./matrix.ldb', create_if_missing=True
)%0A%0A
@@ -623,38 +623,32 @@
.encode('utf-8')
-, ZERO
), 'big')%0A
@@ -715,14 +715,8 @@
-8')
-, ZERO
), '
@@ -812,160 +812,395 @@
-with open(ofn, 'a') as f:%0A f.write('%7B%7D%5Ct%7B%7D%5Ct%7B%7D%5Cn'.format(tmp%5B0%5D, tmp%5B1%5D, x))%0A%0A co_db.close()%0A word_db.close()%0A context_db.close(
+matrix_db.put(k, struct.pack('%3Ed', x))%0A%0A co_db.close()%0A word_db.close()%0A context_db.close()%0A matrix_db.close()%0A%0Adef get_matrix(t, c):%0A matrix_db = plyvel.DB('./matrix.ldb', create_if_missing=True)%0A t_key = '%5Ct'.join((t, c)).encode('utf-8')%0A v = float(struct.unpack('%3Ed', matrix_db.get(t_key))%5B0%5D)%0A matrix_db.close()%0A%0A print('X(%22%7B%7D%22, %22%7B%7D%22) = %7B%7D'.format(t, c, v)
)%0A%0Ai
@@ -1254,49 +1254,45 @@
1%5D)%0A
+%0A
-ofn = 'wc-matrix.txt'%0A
+create_matrix(N)
%0A
-wc
+get
_matrix(
N, o
@@ -1291,12 +1291,15 @@
rix(
-N, ofn
+'of', 'a'
)%0A
|
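Decoded, this commit stops writing scores to a text file and instead persists them in a LevelDB as big-endian doubles (`matrix_db.put(k, struct.pack('>d', x))` inside the renamed `create_matrix`), then adds a reader; `main` now calls `create_matrix(N)` followed by `get_matrix('of', 'a')`. The new reader, reconstructed from the delta:

    def get_matrix(t, c):
        # look up the score for a (term, context) pair stored by create_matrix
        matrix_db = plyvel.DB('./matrix.ldb', create_if_missing=True)
        t_key = '\t'.join((t, c)).encode('utf-8')
        v = float(struct.unpack('>d', matrix_db.get(t_key))[0])
        matrix_db.close()

        print('X("{}", "{}") = {}'.format(t, c, v))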
5f96021cf88201447a48a05dab0ff5a1f131b6bc
|
Fix problem_id command with anonymize argument
|
edx_data_research/reporting/problem_ids/problem_id.py
|
edx_data_research/reporting/problem_ids/problem_id.py
|
'''
In this module, we will generate a csv report for a given problem id, which
will include information about how students fared with a given problem id
'''
from itertools import groupby
from edx_data_research.reporting.edx_base import EdX
class ProblemId(EdX):
def __init__(self, args):
super(self.__class__, self).__init__(args)
self.problem_id = args.problem_id
self.final_attempt = args.final_attempt
def _generate_name_from_problem_id(problem_id, display_name, final_attempt):
'''Generate name of csv output file from problem id'''
attempts_name = '_FinalAttempts' if final_attempt else '_AllAttempts'
return ('_'.join(problem_id.split('/')[3:]) + '_' +
''.join(e for e in display_name if e.isalnum()) + attempts_name +
'.csv')
def problem_id(edx_obj):
edx_obj.collections = ['problem_ids']
cursor = edx_obj.collections['problem_ids'].find({'event.problem_id' :
edx_obj.problem_id})
display_name = cursor[0]['module']['display_name']
one_record = cursor[0]['event']
problem_ids_keys = sorted(one_record['correct_map'].keys(),
key=lambda x : int(x.split('_')[-2]))
problem_ids = []
for key in problem_ids_keys:
try:
item = one_record['submission'][key]
value = item['question']
problem_ids.append('{0} : {1}'.format(key, value))
except UnicodeEncodeError:
value = value.encode("utf-8")
problem_ids.append('{0} : {1}'.format(key, value))
except KeyError:
problem_ids.append('{0}'.format(key))
result = []
for document in cursor:
answers = []
for key in sorted(document['event']['correct_map'].keys(),
key=lambda x : int(x.split('_')[-2])):
try:
answers.append(document['event']['submission'][key]['answer'])
except KeyError:
answers.append('')
result.append([document['hash_id'], document['username'],
document['event']['attempts'],
document['module']['display_name'], document['time'],
document['event']['success'], document['event']['grade'],
document['event']['max_grade']] + answers)
if edx_obj.final_attempt:
result = [max(items, key=lambda x : x[1]) for key, items in
groupby(sorted(result, key=lambda x : x[0]), lambda x : x[0])]
csv_report_name = _generate_name_from_problem_id(edx_obj.problem_id,
display_name,
edx_obj.final_attempt)
headers = (['Hash ID'] if edx_obj.anonymize else
['Hash ID', 'User ID', 'Username'])
headers.extend(['Attempt Number', 'Module', 'Time', 'Success',
'Grade Achieved', 'Max Grade'])
edx_obj.generate_csv(result, headers, csv_report_name)
|
Python
| 0.00177
|
@@ -2038,21 +2038,77 @@
r
-esult.append(
+ow = (%5Bdocument%5B'hash_id'%5D%5D if edx_obj.anonymize else%0A
%5Bdoc
@@ -2124,16 +2124,37 @@
sh_id'%5D,
+ document%5B'user_id'%5D,
documen
@@ -2162,25 +2162,26 @@
%5B'username'%5D
-,
+%5D)
%0A
@@ -2173,39 +2173,36 @@
%5D%5D)%0A
-
+row.extend(%5B
document%5B'ev
@@ -2216,28 +2216,24 @@
attempts'%5D,%0A
-
@@ -2304,36 +2304,32 @@
-
document%5B'event'
@@ -2381,36 +2381,32 @@
-
-
document%5B'event'
@@ -2432,16 +2432,43 @@
nswers)%0A
+ result.append(row)%0A
if e
|
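The fix mirrors the already-conditional headers in the row construction, adding the previously missing `user_id` column for the non-anonymized case; the rebuilt loop tail, decoded from the delta:

    row = ([document['hash_id']] if edx_obj.anonymize else
           [document['hash_id'], document['user_id'], document['username']])
    row.extend([document['event']['attempts'],
                document['module']['display_name'], document['time'],
                document['event']['success'], document['event']['grade'],
                document['event']['max_grade']] + answers)
    result.append(row)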
01003d7b64220b794d8e10e78dd26badef4dfcc5
|
Fix tests
|
base/auth/tests.py
|
base/auth/tests.py
|
from flask_testing import TestCase
from ..app import create_app
from ..config import test
from ..ext import db
class BaseCoreTest(TestCase):
def create_app(self):
return create_app(test)
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def test_users(self):
from base.auth.models import User
response = self.client.post('/users/login/', data=dict())
self.assertRedirects(response, '/')
user = User(username='test', pw_hash='test', email='test@test.com')
db.session.add(user)
db.session.commit()
self.assertTrue(user.updated)
response = self.client.post('/users/login/', data=dict(
email='test@test.com',
action_save=True,
password='test'))
self.assertRedirects(response, '/users/profile/')
response = self.client.get('/users/logout/')
self.assertRedirects(response, '/')
response = self.client.post('/users/register/', data=dict(
username='test2',
email='test2@test.com',
action_save=True,
password='test',
password_confirm='test',
))
self.assertRedirects(response, '/users/profile/')
user = User.query.filter(User.username == 'test2').first()
self.assertEqual(user.email, 'test2@test.com')
def test_manager(self):
from base.auth.models import Role, User
from manage import manager
manager.app = self.app
manager.handle('manage', 'create_role', ['test'])
role = Role.query.filter(Role.name == 'test').first()
self.assertEqual(role.name, 'test')
manager.handle('manage', 'create_user', 'test test@test.com -p 12345'.split())
user = User.query.filter(User.username == 'test').first()
manager.handle('manage', 'add_role', 'test test'.split())
self.assertTrue(role in user.roles)
def test_oauth(self):
from flask import url_for
self.assertTrue(url_for('login_twitter'))
|
Python
| 0.000003
|
@@ -667,16 +667,19 @@
.updated
+_at
)%0A%0A
|
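A one-field fix: the timestamp trait on User is evidently named `updated_at`, so the assertion becomes:

    self.assertTrue(user.updated_at)  # was: self.assertTrue(user.updated)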
69e760e4a571d16e75f30f1e97ea1a917445f333
|
Switch to recipe engine "url" module.
|
recipes/recipe_modules/gitiles/__init__.py
|
recipes/recipe_modules/gitiles/__init__.py
|
DEPS = [
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/python',
'recipe_engine/raw_io',
'url',
]
|
Python
| 0.000023
|
@@ -115,16 +115,30 @@
',%0A '
+recipe_engine/
url',%0A%5D%0A
|
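The resulting DEPS list, with the url module now addressed through the recipe engine namespace:

    DEPS = [
        'recipe_engine/json',
        'recipe_engine/path',
        'recipe_engine/python',
        'recipe_engine/raw_io',
        'recipe_engine/url',
    ]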
57d3b80e85439215222372b03bf25762d8df3eb7
|
kill any already-running autotest instances during setup
|
wlauto/workloads/power_loadtest/__init__.py
|
wlauto/workloads/power_loadtest/__init__.py
|
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=attribute-defined-outside-init
import os
import re
from wlauto import Workload, Parameter
from wlauto.exceptions import WorkloadError
from wlauto.utils.misc import which, check_output
from wlauto.utils.types import arguments, numeric
# Location of the power_LoadTest under the chroot
#POWER_LOADTEST_DIR = '/mnt/host/source/src/third_party/autotest/files/client/site_tests/power_LoadTest'
MARKER = '---------------------------'
STATUS_REGEX = re.compile(r'^\S+\s+\[\s*(\S+)\s*\]')
METRIC_REGEX = re.compile(r'^\S+\s+(\S+)\s*(\S+)')
class PowerLoadtest(Workload):
name = 'power_loadtest'
description = '''
power_LoadTest (part of ChromeOS autotest suite) continuously cycles through a set of
browser-based activities and monitors battery drain on a device.
.. note:: This workload *must* be run inside a ChromeOS SDK chroot.
See: https://www.chromium.org/chromium-os/testing/power-testing
'''
parameters = [
Parameter('board', default=os.getenv('BOARD'),
description='''
The name of the board to be used for the test. If this is not specified,
BOARD environment variable will be used.
'''),
Parameter('variant',
description='''
The variant of the test to run; If not specified, the full power_LoadTest will
run (until the device battery is drained). The only other variant available in the
vanilla test is "1hour", but further variants may be added by providing custom
control files.
'''),
Parameter('test_that_args', kind=arguments, default='',
description='''
Extra arguments to be passed to the test_that invocation.
'''),
Parameter('run_timeout', kind=int, default=24 * 60 * 60,
description='''
Timeout, in seconds, for the test execution.
'''),
]
def setup(self, context):
if self.device.platform != 'chromeos':
raise WorkloadError('{} only supports ChromeOS devices'.format(self.name))
self.test_that = which('test_that')
if not self.test_that:
message = ('Could not find "test_that"; {} must be running in a ChromeOS SDK chroot '
'(did you execute "cros_sdk"?)')
raise WorkloadError(message.format(self.name))
self.command = self._build_command()
self.raw_output = None
def run(self, context):
self.logger.debug(self.command)
self.raw_output, _ = check_output(self.command, timeout=self.run_timeout, shell=True)
def update_result(self, context):
if not self.raw_output:
self.logger.warning('No power_LoadTest output detected; run failed?')
return
raw_outfile = os.path.join(context.output_directory, 'power_loadtest.raw')
with open(raw_outfile, 'w') as wfh:
wfh.write(self.raw_output)
context.add_artifact('power_LoadTest_raw', raw_outfile, kind='raw')
lines = iter(self.raw_output.split('\n'))
# Results are delimited from the rest of the output by MARKER
for line in lines:
if MARKER in line:
break
for line in lines:
match = STATUS_REGEX.search(line)
if match:
status = match.group(1)
if status != 'PASSED':
self.logger.warning(line)
match = METRIC_REGEX.search(line)
if match:
try:
context.result.add_metric(match.group(1), numeric(match.group(2)), lower_is_better=True)
except ValueError:
pass # non-numeric metrics aren't supported
def _build_command(self):
test_name = 'power_LoadTest'
if self.variant:
test_name += '.' + self.variant
parts = [self.test_that, self.device.host, test_name]
if self.board:
parts.append('-b {}'.format(self.board))
parts.append(str(self.test_that_args))
return ' '.join(parts)
|
Python
| 0
|
@@ -3132,16 +3132,135 @@
t = None
+%0A # make sure no other test is running%0A self.device.execute('killall -9 autotest', check_exit_code=False)
%0A%0A de
|
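Decoded, `setup()` gains a pre-emptive cleanup right after `self.raw_output = None`; passing `check_exit_code=False` tolerates killall finding no running process:

    # make sure no other test is running
    self.device.execute('killall -9 autotest', check_exit_code=False)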
6ced33f201e8a4e389a077a91ba9ed8bf5c19fa0
|
fix issue with number for samples
|
wqflask/wqflask/correlation/pre_computes.py
|
wqflask/wqflask/correlation/pre_computes.py
|
"""module contains the code to do the
precomputations of sample data between
two entire datasets"""
import json
from typing import List
from base import data_set
from gn3.computations.correlations import fast_compute_all_sample_correlation
from gn3.computations.correlations import map_shared_keys_to_values
def get_dataset_dict_data(dataset_obj):
"""function to get the dataset data mapped to key"""
dataset_obj.get_trait_data()
return map_shared_keys_to_values(dataset_obj.samplelist,
dataset_obj.trait_data)
def fetch_datasets(base_dataset_name: str, target_dataset_name: str) ->List:
"""query to fetch create datasets and fetch traits
all traits of a dataset"""
# doesnt work for temp
base_dataset = data_set.create_dataset(dataset_name=base_dataset_name)
target_dataset = data_set.create_dataset(dataset_name=target_dataset_name)
# replace with map
return (map(get_dataset_dict_data,
[base_dataset, target_dataset]))
# in the base dataset we just need the traits
def pre_compute_sample_correlation(base_dataset: List,
target_dataset: List) -> List:
"""function compute the correlation between the
a whole dataset against a target
input: target&base_dataset(contains traits and sample results)
output: list containing the computed results
precaution:function is expensive;targets only Exon and
"""
for trait_info in base_dataset:
yield fast_compute_all_sample_correlation(corr_method="pearson",
this_trait=trait_info,
target_dataset=target_dataset)
def cache_to_file(base_dataset_name: str, target_dataset_name: str):
"""function to cache the results to file"""
# validate the datasets expiry first
base_dataset_data, target_dataset_data = [list(dataset) for dataset in list(
fetch_datasets(base_dataset_name, target_dataset_name))]
try:
with open("unique_file_name.json", "w") as file_handler:
file_handler.write()
dataset_correlation_results = list(pre_compute_sample_correlation(
base_dataset_data, target_dataset_data))
print(dataset_correlation_results)
json.dump(dataset_correlation_results, file_handler)
except Exception as error:
raise error
|
Python
| 0
|
@@ -158,16 +158,89 @@
ta_set%0A%0A
+from gn3.computations.correlations import compute_all_sample_correlation%0A
from gn3
@@ -378,16 +378,17 @@
values%0A%0A
+%0A
def get_
@@ -507,16 +507,55 @@
it_data(
+dataset_obj.group.all_samples_ordered()
)%0A re
@@ -1570,24 +1570,42 @@
nd%0A %22%22%22%0A%0A
+ results = %5B%5D%0A%0A
for trai
@@ -1641,14 +1641,262 @@
-yield
+result = fast_compute_all_sample_correlation(corr_method=%22pearson%22,%0A this_trait=trait_info,%0A target_dataset=target_dataset)%0A%0A # results.append(
fast
@@ -1942,32 +1942,42 @@
thod=%22pearson%22,%0A
+ #
@@ -2002,32 +2002,33 @@
+
this_trait=trait
@@ -2026,32 +2026,43 @@
ait=trait_info,%0A
+ #
@@ -2125,16 +2125,85 @@
dataset)
+)%0A print(%22finished%22)%0A print(result)%0A%0A return results
%0A%0A%0Adef c
@@ -2504,16 +2504,49 @@
ame))%5D%0A%0A
+ # print(target_dataset_data)%0A
%0A try
@@ -2554,16 +2554,18 @@
%0A
+ #
with op
@@ -2621,16 +2621,18 @@
%0A
+ #
file_ha
@@ -2684,21 +2684,16 @@
sults =
-list(
pre_comp
@@ -2767,18 +2767,16 @@
et_data)
-)%0A
%0A
@@ -2819,16 +2819,18 @@
%0A
+ #
json.du
@@ -2926,8 +2926,111 @@
e error%0A
+%0A%0Adef check_cached_files_validity():%0A %22%22%22function to check the validity of cached files%22%22%22%0A pass%0A
|
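The sample-number fix itself is one line: trait data is now fetched against the group's ordered sample list, so values line up with `samplelist`; most of the rest of the delta is commented-out debugging scaffolding. The changed helper, decoded:

    def get_dataset_dict_data(dataset_obj):
        """function to get the dataset data mapped to key"""
        dataset_obj.get_trait_data(dataset_obj.group.all_samples_ordered())
        return map_shared_keys_to_values(dataset_obj.samplelist,
                                         dataset_obj.trait_data)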
e91e2a8b9ac6af9a20d27b4f8d5a990938973491
|
Fix for Results screen.
|
satori.web/satori/web/views/contest/results.py
|
satori.web/satori/web/views/contest/results.py
|
# vim:ts=4:sts=4:sw=4:expandtab
from satori.client.common import want_import
want_import(globals(), '*')
from satori.web.utils.decorators import contest_view
from satori.web.utils.tables import *
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django import forms
@contest_view
def view(request, page_info):
contest = page_info.contest
admin = page_info.contest_is_admin
class SubmitsTable(ResultTable):
def length(self):
return len(self.results)
@staticmethod
def default_limit():
return 30
@staticmethod
def max_limit():
return 100000
def __init__(self,req,prefix=''):
super(SubmitsTable,self).__init__(req=req,prefix=prefix,autosort=False)
if admin and 'cts' in self.filters.keys():
contestant = Contestant(int(self.filters['cts']))
else:
contestant = None
if 'problem' in self.filters.keys():
problem = Problem(int(self.filters['problem']))
else:
problem = None
limit = int(self.params['limit'])
if limit==0:
limit = max_limit
page = int(self.params['page'])
self.showdiff=int(self.params.get('diff','0'))
query = Web.get_results(contest=contest,contestant=contestant,problem=problem,offset=(page-1)*limit,limit=limit)
self.results = query.results
self.total = query.count
self.fields.append(TableField(name='No.',
value=(lambda table,i: table.results[i].submit.id),
render=(lambda table,i: '<a class="stdlink" href="'+reverse('view_result',args=[contest.id,table.results[i].submit.id])+'">'
+unicode(table.results[i].submit.id)+'</a>'),
id=1))
if self.showdiff:
self.fields.append(TableField(name='',value='',render=(lambda table,i: '<input type="radio" name="diff_1" value="'+unicode(table.results[i].submit.id)+'"/>'),id='diff_1'))
self.fields.append(TableField(name='',value='',render=(lambda table,i: '<input type="radio" name="diff_2" value="'+unicode(table.results[i].submit.id)+'"/>'),id='diff_2'))
if admin:
cts = TableField(name='Contestant',value=(lambda table,i: table.results[i].contestant.name),
render=(lambda table,i: '<a class="stdlink" href="'+table.getparams(filters={'cts' : unicode(table.results[i].contestant.id)},page=1)+'">'+table.results[i].contestant.name+'</a>'),
id='cts')
choices = [[unicode(c.id),c.name] for c in Web.get_accepted_contestants(contest=contest,limit=self.max_limit()).contestants]
self.fields.append(cts)
self.filter_functions.append(FilterFunction(name='Contestant',prefix='cts',choices=choices))
prf = TableField(name='Problem',value=(lambda table,i: table.results[i].problem_mapping.code), id='problem')
pmlist = Web.get_problem_mapping_list(contest=contest)
pmlist.sort(key=lambda p: p.problem_mapping.code)
pchoices = [[unicode(p.problem_mapping.id),p.problem_mapping.code+' ('+p.problem_mapping.title+')'] for p in pmlist]
self.fields.append(prf)
self.filter_functions.append(FilterFunction(name='Problem',prefix='problem',choices=pchoices))
self.fields.append(TableField(name='Time',value=(lambda table,i: table.results[i].submit.time), id=4))
self.fields.append(TableField(name='Status',value=(lambda table,i: table.results[i].status),
render=(lambda table,i: '<div class="submitstatus"><div class="sta'+unicode(table.results[i].status)+'">'+unicode(table.results[i].status)+'</div></div>')
,id=5,css='status'))
def suite_results(table,i):
r = Web.get_result_details(table.results[i].submit)
return '<br/>'.join([tsr.test_suite.name+': '+tsr.test_suite_result.status for tsr in r.test_suite_results])
if self.filters.get('allsuites','0')=='1':
self.fields.append(TableField(name='Results',value='',render=suite_results,id=6))
if self.showdiff:
newdiff = 0
else:
newdiff = 1
self.difflink = self.getparams(diff=newdiff)
if admin:
self.filter_functions.append(FilterFunction(prefix='allsuites',name='Show results on',choices=[('0','Default suite'),('1','All suites')],default='0',showall=False))
results = SubmitsTable(req = request.GET,prefix='results')
return render_to_response('results.html',{ 'page_info' : page_info, 'results' : results })
|
Python
| 0
|
@@ -1552,24 +1552,102 @@
diff','0'))%0A
+ detailed_tsr = admin and (self.filters.get('allsuites','0')=='1')%0A
@@ -1757,16 +1757,42 @@
it=limit
+,detailed_tsr=detailed_tsr
)%0A
@@ -4565,55 +4565,75 @@
r =
-Web.get_result_details(table.results%5Bi%5D.submit)
+table.results%5Bi%5D%0A d = r.__dict__%0A 8/0
%0A
@@ -4773,46 +4773,20 @@
if
-self.filters.get('allsuites','0')=='1'
+detailed_tsr
:%0A
|
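Decoded, the screen now pushes the 'all suites' decision into the query instead of fetching result details row by row; note the committed `suite_results` body ends in `8/0`, which looks like debugging residue in the actual commit. The key plumbing:

    detailed_tsr = admin and (self.filters.get('allsuites','0')=='1')
    query = Web.get_results(contest=contest, contestant=contestant, problem=problem,
                            offset=(page-1)*limit, limit=limit, detailed_tsr=detailed_tsr)
    # ... and the extra column is gated on the same flag:
    if detailed_tsr:
        self.fields.append(TableField(name='Results', value='', render=suite_results, id=6))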
5703faccd6e425148928e52221959464b1eac108
|
use recursive method
|
reg_tests/executeOpenfastRegressionCase.py
|
reg_tests/executeOpenfastRegressionCase.py
|
#
# Copyright 2017 National Renewable Energy Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This program executes OpenFAST and a regression test for a single test case.
The test case must be one of the CertTest cases. The test data is contained in a git submodule,
r-test, which must be initialized prior to running. r-test can be initialized
with `git submodule update --init --recursive` or updated with `git submodule update`.
Usage: `python3 executeRegressionTestCase.py testname openfast_executable source_directory build_directory tolerance system_name compiler_id`
Example: `python3 executeRegressionTestCase.py Test02 openfast path/to/openfast_repo path/to/openfast_repo/build 0.000001 [Darwin,Linux,Windows] [Intel,GNU]`
"""
import os
from stat import *
import sys
import shutil
import subprocess
##### Helper functions
def exitWithError(error):
print(error)
sys.exit(1)
def exitWithDirNotFound(dir):
exitWithError("Directory does not exist: {}\n".format(dir))
def exitWithFileNotFound(file):
exitWithError("File does not exist: {}\n".format(file))
##### Main program
### Determine python version
if sys.version_info < (3, 0): pythonCommand = "python"
else: pythonCommand = "python3"
### Verify input arguments
if len(sys.argv) < 6 or len(sys.argv) > 8:
exitWithError("Invalid arguments: {}\n".format(" ".join(sys.argv)) +
"Usage: {} executeRegressionTestCase.py testname openfast_executable source_directory build_directory tolerance system_name compiler_id".format(pythonCommand))
caseName = sys.argv[1]
executable = sys.argv[2]
sourceDirectory = sys.argv[3]
buildDirectory = sys.argv[4]
tolerance = sys.argv[5]
# verify executable
if not os.path.isfile(executable):
exitWithError("The given executable, {}, does not exist.".format(executable))
permissionsMask = oct(os.stat(executable)[ST_MODE])[-1:]
if not int(permissionsMask)%2 == 1:
exitWithError("The given executable, {}, does not have proper permissions.".format(executable))
# verify source directory
if not os.path.isdir(sourceDirectory):
exitWithError("The given source directory, {}, does not exist.".format(sourceDirectory))
# verify build directory
if not os.path.isdir(buildDirectory):
os.mkdir(buildDirectory)
if not os.path.isdir(buildDirectory):
exitWithError("The given build directory, {}, does not exist.".format(buildDirectory))
# verify tolerance
try:
float(tolerance)
except ValueError:
exitWithError("The given tolerance, {}, is not a valid number.".format(tolerance))
systemcompiler_given = True
try:
systemName = sys.argv[6]
except IndexError:
systemcompiler_given = False
systemName = "not_given"
try:
compilerId = sys.argv[7]
except IndexError:
systemcompiler_given = False
compilerId = "not_given"
### Map the system and compiler configurations to a solution set
# Internal names -> Human readable names
systemName_map = {
"darwin": "macos",
"linux": "linux"
}
compilerId_map = {
"gnu": "gnu",
"intel": "intel"
}
# Build the target output directory name or choose the default
if systemName.lower() not in systemName_map or compilerId.lower() not in compilerId_map:
targetSystem = "macos"
targetCompiler = "gnu"
else:
targetSystem = systemName_map.get(systemName.lower())
targetCompiler = compilerId_map.get(compilerId.lower())
outputType = os.path.join(targetSystem+"-"+targetCompiler)
print("-- Using gold standard files with machine-compiler type {}".format(outputType))
### Build the filesystem navigation variables for running openfast on the test case
regtests = os.path.join(sourceDirectory, "reg_tests")
lib = os.path.join(regtests, "lib")
rtest = os.path.join(regtests, "r-test")
moduleDirectory = os.path.join(rtest, "openfast")
inputsDirectory = os.path.join(moduleDirectory, caseName)
targetOutputDirectory = os.path.join(inputsDirectory, outputType)
testBuildDirectory = os.path.join(buildDirectory, caseName)
# verify all the required directories exist
if not os.path.isdir(rtest):
exitWithError("The test data directory, {}, does not exist. If you haven't already, run `git submodule update --init --recursive`".format(rtest))
if not os.path.isdir(targetOutputDirectory):
exitWithError("The test data outputs directory, {}, does not exist. Try running `git submodule update`".format(targetOutputDirectory))
if not os.path.isdir(inputsDirectory):
exitWithError("The test data inputs directory, {}, does not exist. Verify your local repository is up to date.".format(inputsDirectory))
# create the local output directory if it does not already exist
# and initialize it with input files for all test cases
for data in ["5MW_Baseline", "AOC", "AWT27", "SWRT", "UAE_VI", "WP_Baseline"]:
dataDir = os.path.join(buildDirectory, data)
if not os.path.isdir(dataDir):
shutil.copytree(os.path.join(moduleDirectory, data), dataDir)
if not os.path.isdir(testBuildDirectory):
shutil.copytree(inputsDirectory, testBuildDirectory)
### Run openfast on the test case
caseInputFile = os.path.join(testBuildDirectory, caseName + ".fst")
executionScript = os.path.join(lib, "executeOpenfastCase.py")
executionCommand = " ".join([pythonCommand, executionScript, caseInputFile, executable])
print("'{}' - running".format(executionCommand))
sys.stdout.flush()
executionReturnCode = subprocess.call(executionCommand, shell=True)
print("'{}' - finished with exit code {}".format(executionCommand, executionReturnCode))
if executionReturnCode != 0:
exitWithError("")
### Build the filesystem navigation variables for running the regression test
passFailScript = os.path.join(lib, "pass_fail.py")
localOutputFile = os.path.join(testBuildDirectory, caseName + ".outb")
goldStandardFile = os.path.join(targetOutputDirectory, caseName + ".outb")
if not os.path.isfile(passFailScript): exitWithFileNotFound(passFailScript)
if not os.path.isfile(localOutputFile): exitWithFileNotFound(localOutputFile)
if not os.path.isfile(goldStandardFile): exitWithFileNotFound(goldStandardFile)
passfailCommand = " ".join([pythonCommand, passFailScript, localOutputFile, goldStandardFile, tolerance])
print("'{}' - running".format(passfailCommand))
sys.stdout.flush()
passfailReturnCode = subprocess.call(passfailCommand, shell=True)
print("'{}' - finished with exit code {}".format(passfailCommand, passfailReturnCode))
# return pass/fail
sys.exit(passfailReturnCode)
|
Python
| 0.000019
|
@@ -2758,12 +2758,15 @@
os.m
-k
+ake
dir
+s
(bui
|
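"Recursive method" here means `os.makedirs`, which also creates any missing intermediate directories when the build path is nested:

    if not os.path.isdir(buildDirectory):
        os.makedirs(buildDirectory)  # was: os.mkdir(buildDirectory)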
bb366439065924732b9b1559a0dc776c586fa07c
|
fix url
|
regulations/tests/selenium/example_test.py
|
regulations/tests/selenium/example_test.py
|
import os
import unittest
import base64
import json
import httplib
import sys
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
class ExampleTest(unittest.TestCase):
def setUp(self):
self.capabilities = webdriver.DesiredCapabilities.CHROME
self.capabilities['tunnel-identifier'] = os.environ['TRAVIS_JOB_NUMBER']
self.capabilities['build'] = os.environ['TRAVIS_BUILD_NUMBER']
self.capabilities['platform'] = 'LINUX'
self.capabilities['version'] = ''
self.capabilities['name'] = 'Example test'
self.username = os.environ['SAUCE_USERNAME']
self.key = os.environ['SAUCE_ACCESS_KEY']
hub_url = "%s:%s" % (self.username, self.key)
self.driver = webdriver.Remote(desired_capabilities=self.capabilities,
command_executor = ("http://%s@ondemand.saucelabs.com:80/wd/hub" % hub_url))
self.jobid = self.driver.session_id
print("Sauce Labs job: https://saucelabs.com/jobs/%s" % self.jobid)
self.driver.implicitly_wait(30)
def test_sauce(self):
self.driver.get('http://localhost:8000')
toc_link_1005_1 = self.driver.find_element_by_xpath('//*[@id="toc"]/ol/li[1]/a')
self.assertEquals(toc_link_1005_1.get_attribute('data-section-id'), '1005-1')
def tearDown(self):
print("https://saucelabs.com/jobs/%s" % self.driver.session_id)
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
Python
| 0.86565
|
@@ -1162,16 +1162,21 @@
ost:8000
+/1005
')%0A
|
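The fix appends a regulation part to the URL so the table-of-contents XPath actually resolves:

    self.driver.get('http://localhost:8000/1005')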
494b87788d15b50169def821312660601a05101d
|
Add 'schema' command
|
ppshell.py
|
ppshell.py
|
# Dota 2 local DB pretty-print shell
import cmd
import d2mdb_const as const
import datetime
import sqlite3
import sys
import time
heroes = dict()
# a bunch of lambdas to convert from values in the DB to proper representations
FIELD_CONV = {
"hero": (lambda x: "%s (%d)" % (heroes[x], x)),
"team": (lambda x: const.TEAMS[x]),
"won": bool,
"duration": (lambda x: datetime.timedelta(seconds=int(x))),
"start_time": time.ctime,
"game_mode": (lambda x: const.GAME_MODES[x] if x in const.GAME_MODES else "? (%s)" % x),
"ranked": bool,
}
class Ppshell(cmd.Cmd):
def __init__(self, cur):
super(Ppshell, self).__init__()
self.cur = cur
self.prompt = '> '
def print_match(self, row):
for k in row.keys():
label = const.DB_FIELD_NAMES[k] if k in const.DB_FIELD_NAMES else k
v = row[k]
value = FIELD_CONV[k](v) if k in FIELD_CONV else v
print("%s: %s" % (label, value))
def do_raw(self, s):
'Execute a raw SQL query.'
try:
rows = self.cur.execute(s)
num_results = 0
for row in rows:
self.print_match(row)
print()
num_results = num_results + 1
print("%s result(s)" % num_results)
except sqlite3.OperationalError as sqlerror:
print("? - %s" % sqlerror)
def do_select(self, s):
'Execute a SQL SELECT query.'
self.do_raw("SELECT %s" % s)
def do_id(self, s):
'Get a single match result for a match ID.'
if len(s) == 0:
print("No ID specified.")
return
self.do_select("* FROM matches WHERE id=%s" % s)
def do_last(self, s):
'Get the latest match recorded.'
self.do_select("* FROM matches ORDER BY id DESC LIMIT 1")
def do_at(self, s):
'Get the match closest to the given time (in format YYYY-MM-DD hh-mm-ss)'
try:
time = datetime.datetime.strptime(s, "%Y-%m-%d %H-%M-%S").timestamp()
rows = self.cur.execute("SELECT id FROM matches ORDER BY ABS(start_time - ?) ASC LIMIT 1", (time,))
match_id = str(rows.fetchone()['id'])
self.do_id(match_id)
except ValueError as error:
print("? - %s" % error)
print("(Date needs to be in 'YYYY-MM-DD hh-mm-ss' format.)")
def do_heroes(self, s):
'Get the ID to hero mapping.'
rev_map = sorted([(b, a) for (a, b) in heroes.items()], key=lambda el: el[0])
for hero, hero_id in rev_map:
print("%s: %d" % (hero, hero_id))
def do_hero(self, s):
'Search for a hero (ID or substring).'
if s.isdigit():
try:
print(heroes[int(s)])
except KeyError:
print("No hero with ID %s." % s)
else:
for hero_id, hero in heroes.items():
if hero.lower().find(s.lower()) != -1:
print("%d (%s)" % (hero_id, hero))
return
print("Couldn't find hero with name \"%s\"." % s)
def do_exit(self, s):
'Exit the shell.'
print('Bye')
return True
def do_EOF(self, s):
return self.do_exit(s)
def emptyline(self):
pass
def main():
global heroes
# check command line arguments
if len(sys.argv) < 2:
print("Usage: %s <db file>" % sys.argv[0])
return
hero_db = sqlite3.connect(const.HEROES_DB_FILE)
hero_db_cur = hero_db.cursor()
heroes = hero_db_cur.execute("SELECT id, name FROM heroes").fetchall()
heroes = dict(heroes)
db_file = sys.argv[1]
db = sqlite3.connect(db_file)
db.row_factory = sqlite3.Row
cur = db.cursor()
shell = Ppshell(cur)
go = True
while go:
try:
shell.cmdloop()
go = False
except KeyboardInterrupt:
print("^C")
pass
db.close()
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -2622,32 +2622,145 @@
%5C%22%25s%5C%22.%22 %25 s)%0A%0A
+%09def do_schema(self, s):%0A%09%09'Print out the schema (as expected by the program).'%0A%09%09print(const.SQL_MATCH_SCHEMA)%0A%0A
%09def do_exit(sel
|
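The added command, decoded from the delta (the source file indents with tabs; `SQL_MATCH_SCHEMA` is assumed to be defined in `d2mdb_const` alongside the other constants):

    def do_schema(self, s):
        'Print out the schema (as expected by the program).'
        print(const.SQL_MATCH_SCHEMA)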
bbee1e9b8563d56c0d0acbfc6ae61334f8251159
|
Reset default value in test so that it doesn't produce error
|
enthought/traits/tests/undefined_test_case.py
|
enthought/traits/tests/undefined_test_case.py
|
import unittest
from enthought.traits.api import HasTraits, Str, Undefined, ReadOnly, Float
class Foo(HasTraits):
name = Str()
original_name = ReadOnly
bar = Str
baz = Float
def _name_changed(self):
if self.original_name is Undefined:
self.original_name = self.name
class Bar(HasTraits):
name = Str(Undefined())
class UndefinedTestCase(unittest.TestCase):
def test_initial_value(self):
b = Bar()
self.failUnlessEqual( b.name, Undefined )
return
def test_name_change(self):
b = Bar()
b.name = 'first'
self.failUnlessEqual( b.name, 'first' )
return
def test_read_only_write_once(self):
f = Foo()
self.failUnlessEqual(f.name, '')
self.failUnless(f.original_name is Undefined)
f.name = 'first'
self.failUnlessEqual(f.name, 'first')
self.failUnlessEqual(f.original_name, 'first')
f.name = 'second'
self.failUnlessEqual(f.name, 'second')
self.failUnlessEqual(f.original_name, 'first')
return
def test_read_only_write_once_from_constructor(self):
f = Foo(name='first')
f.name = 'first'
self.failUnlessEqual(f.name, 'first')
self.failUnlessEqual(f.original_name, 'first')
f.name = 'second'
self.failUnlessEqual(f.name, 'second')
self.failUnlessEqual(f.original_name, 'first')
return
### EOF #######################################################################
|
Python
| 0.000001
|
@@ -350,18 +350,16 @@
ndefined
-()
)%0A%0Aclass
|
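`Undefined` in Traits is a singleton sentinel value, not a class to instantiate, so calling it was evidently the error being reset here:

    class Bar(HasTraits):
        name = Str(Undefined)  # was: Str(Undefined()); the sentinel is not callable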
85f14fffc01002e5a1c0a7a3644a81a4ade61745
|
Bump dsub version to 0.2.1
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.1.dev0'
|
Python
| 0
|
@@ -1114,15 +1114,10 @@
= '0.2.1
-.dev0
'%0A
|
edb6f738979e213cca3fd03991caebdf209b09b9
|
Fix permissions script
|
static/extension/dynamic_scope/dynamic_permission.py
|
static/extension/dynamic_scope/dynamic_permission.py
|
# oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
# Copyright (c) 2016, Gluu
#
# Author: Yuriy Movchan
#
from org.xdi.model.custom.script.type.scope import DynamicScopeType
from org.xdi.oxauth.service import UserService
from org.xdi.util import StringHelper, ArrayHelper
from java.util import Arrays, ArrayList
import java
class DynamicScope(DynamicScopeType):
def __init__(self, currentTimeMillis):
self.currentTimeMillis = currentTimeMillis
def init(self, configurationAttributes):
print "Permission dynamic scope. Initialization"
print "Permission dynamic scope. Initialized successfully"
return True
def destroy(self, configurationAttributes):
print "Permission dynamic scope. Destroy"
print "Permission dynamic scope. Destroyed successfully"
return True
# Update JSON Web Token before signing/encrypting it
# dynamicScopeContext is org.xdi.oxauth.service.external.context.DynamicScopeExternalContext
# configurationAttributes is java.util.Map<String, SimpleCustomProperty>
def update(self, dynamicScopeContext, configurationAttributes):
print "Permission dynamic scope scope. Update method"
authorizationGrant = dynamicScopeContext.getAuthorizationGrant()
user = dynamicScopeContext.getUser()
jsonWebResponse = dynamicScopeContext.getJsonWebResponse()
claims = jsonWebResponse.getClaims()
userService = UserService.instance()
roles = userService.getCustomAttribute(user, "role")
if roles != None:
claims.setClaim("role", roles.getValues())
return True
def logout(self, configurationAttributes, requestParameters):
return True
def getApiVersion(self):
return 1
|
Python
| 0.000001
|
@@ -230,16 +230,62 @@
peType%0D%0A
+from org.xdi.service.cdi.util import CdiUtil%0D%0A
from org
@@ -1595,16 +1595,29 @@
rvice =
+CdiUtil.bean(
UserServ
@@ -1623,18 +1623,8 @@
vice
-.instance(
)%0D%0A
|
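Decoded, bean lookup in this Jython script now goes through the CDI utility instead of the static accessor:

    from org.xdi.service.cdi.util import CdiUtil

    userService = CdiUtil.bean(UserService)  # was: UserService.instance()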
c1923339d7d64b9e85e3a2a1522ff0442e18a798
|
Update common version (#6060)
|
sdk/core/azure-common/azure/common/_version.py
|
sdk/core/azure-common/azure/common/_version.py
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
VERSION = "1.1.22"
|
Python
| 0
|
@@ -318,11 +318,11 @@
= %221.1.2
-2
+3
%22%0A
|
554e97008dc5861314406fecb6cc9fe79deb3a5d
|
Add --wait argument in predictor to make it easier to fill in a bracket.
|
predict.py
|
predict.py
|
#!/usr/bin/env python3
import argparse
import numpy as np
import tensorflow as tf
from constants import DNN_HIDDEN_UNITS
from data_loader import load_ncaa_players, load_ncaa_schools, \
get_players_for_team
BRACKET = (
(
(
# East
(
(
("Villanova", "Mt. St. Mary's"),
("Wisconsin", "Virginia Tech"),
), (
("Virginia", "UNCW"),
("Florida", "ETSU"),
),
), (
(
("SMU", "Southern California"),
("Baylor", "New Mexico St."),
), (
("South Carolina", "Marquette"),
("Duke", "Troy"),
),
),
), (
# West
(
(
("Gonzaga", "South Dakota St."),
("Northwestern", "Vanderbilt"),
), (
("Notre Dame", "Princeton"),
("West Virginia", "Bucknell"),
),
), (
(
("Maryland", "Xavier"),
("Florida St.", "FGCU"),
), (
("Saint Mary's (CA)", "VCU"),
("Arizona", "North Dakota"),
),
),
),
),
(
(
# Midwest
(
(
("Kansas", "UC Davis"),
("Miami (FL)", "Michigan St."),
), (
("Iowa St.", "Nevada"),
("Purdue", "Vermont"),
),
), (
(
("Creighton", "Rhode Island"),
("Oregon", "Iona"),
), (
("Michigan", "Oklahoma St."),
("Louisville", "Jacksonville St."),
),
),
), (
# South
(
(
("North Carolina", "Texas Southern"),
("Arkansas", "Seton Hall"),
), (
("Minnesota", "Middle Tenn."),
("Butler", "Winthrop"),
),
), (
(
("Cincinnati", "Kansas St."),
("UCLA", "Kent St."),
), (
("Dayton", "Wichita St."),
("Kentucky", "Northern Ky."),
),
),
),
),
)
def team_id_to_name(id, all_teams):
return all_teams[all_teams["school_id"] == id]["school_name"].values[0]
def team_name_to_id(name, all_teams):
try:
return \
all_teams[all_teams["school_name"] == name]["school_id"].values[0]
except IndexError:
raise Exception("Couldn't find ID for school [%s]" % name)
def predict(estimator, all_teams, all_players, bracket):
team_a, team_b = bracket
if isinstance(team_a, tuple):
team_a = predict(estimator, all_teams, all_players, team_a)
if isinstance(team_b, tuple):
team_b = predict(estimator, all_teams, all_players, team_b)
teams = [team_a, team_b]
team_ids = [team_name_to_id(name, all_teams) for name in teams]
players_a = get_players_for_team(all_players, team_ids[0])
players_b = get_players_for_team(all_players, team_ids[1])
x = np.array([np.stack([players_a, players_b])])
# classifier tells us 1 if team_a wins, 2 if team_b wins
c = next(estimator.predict(x=x))
winner = teams[not c]
print("%s vs %s: %s wins" % (team_a, team_b, winner))
input()
return winner
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--model-in", "-m", required=True)
parser.add_argument("--year", "-y", default=2017, type=int)
args = parser.parse_args()
tf.logging.set_verbosity(tf.logging.ERROR)
players = load_ncaa_players(args.year)
all_teams = load_ncaa_schools()
example_team = get_players_for_team(players, 697)
features = np.array([np.stack([example_team, example_team])])
feature_cols = \
tf.contrib.learn.infer_real_valued_columns_from_input(features)
estimator = tf.contrib.learn.DNNClassifier(
hidden_units=DNN_HIDDEN_UNITS,
model_dir=args.model_in, feature_columns=feature_cols)
predict(estimator, all_teams, players, BRACKET)
|
Python
| 0
|
@@ -2992,16 +2992,28 @@
bracket
+, wait=False
):%0A t
@@ -3136,16 +3136,22 @@
, team_a
+, wait
)%0A if
@@ -3244,16 +3244,22 @@
, team_b
+, wait
)%0A te
@@ -3710,16 +3710,33 @@
inner))%0A
+ if wait:%0A
inpu
@@ -3948,16 +3948,114 @@
pe=int)%0A
+ parser.add_argument(%0A %22--wait%22, %22-w%22, default=False, action=%22store_const%22, const=True)%0A
args
@@ -4620,10 +4620,21 @@
BRACKET
+, args.wait
)%0A
|
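The `--wait` flag threads through the recursive `predict` so the unconditional `input()` pause becomes opt-in (the recursive `team_a`/`team_b` calls forward `wait` as well); the changed lines, decoded from the delta:

    def predict(estimator, all_teams, all_players, bracket, wait=False):
        ...
        if wait:
            input()  # pause so the bracket can be filled in game by game
        return winner

    parser.add_argument(
        "--wait", "-w", default=False, action="store_const", const=True)
    predict(estimator, all_teams, players, BRACKET, args.wait)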
a33d925cac280108b05219ddd3447be7bff821c6
|
Version bump: 0.3.1
|
durations/__init__.py
|
durations/__init__.py
|
version = (0, 3, 0)
__title__ = "durations"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
from .duration import Duration
from .scales import Scale
|
Python
| 0.000001
|
@@ -10,17 +10,17 @@
(0, 3,
-0
+1
)%0A%0A__tit
|
e3373f8643e2dcef0ba9d2943c4583482785e67d
|
Support 1/True and False/0 for expose states as well (#655)
|
home-assistant-plugin/custom_components/xknx/expose.py
|
home-assistant-plugin/custom_components/xknx/expose.py
|
"""Exposures to KNX bus."""
from __future__ import annotations
from typing import Callable
from xknx import XKNX
from xknx.devices import DateTime, ExposeSensor
from homeassistant.const import (
CONF_ENTITY_ID,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.typing import ConfigType, StateType
from .const import KNX_ADDRESS
from .schema import ExposeSchema
@callback
def create_knx_exposure(
hass: HomeAssistant, xknx: XKNX, config: ConfigType
) -> KNXExposeSensor | KNXExposeTime:
"""Create exposures from config."""
address = config[KNX_ADDRESS]
expose_type = config[ExposeSchema.CONF_XKNX_EXPOSE_TYPE]
attribute = config.get(ExposeSchema.CONF_XKNX_EXPOSE_ATTRIBUTE)
default = config.get(ExposeSchema.CONF_XKNX_EXPOSE_DEFAULT)
exposure: KNXExposeSensor | KNXExposeTime
if (
isinstance(expose_type, str)
and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
):
exposure = KNXExposeTime(xknx, expose_type, address)
else:
entity_id = config[CONF_ENTITY_ID]
exposure = KNXExposeSensor(
hass,
xknx,
expose_type,
entity_id,
attribute,
default,
address,
)
return exposure
class KNXExposeSensor:
"""Object to Expose Home Assistant entity to KNX bus."""
def __init__(
self,
hass: HomeAssistant,
xknx: XKNX,
expose_type: int | str,
entity_id: str,
attribute: str | None,
default: StateType,
address: str,
) -> None:
"""Initialize of Expose class."""
self.hass = hass
self.xknx = xknx
self.type = expose_type
self.entity_id = entity_id
self.expose_attribute = attribute
self.expose_default = default
self.address = address
self._remove_listener: Callable[[], None] | None = None
self.device: ExposeSensor = self.async_register()
@callback
def async_register(self) -> ExposeSensor:
"""Register listener."""
if self.expose_attribute is not None:
_name = self.entity_id + "__" + self.expose_attribute
else:
_name = self.entity_id
device = ExposeSensor(
self.xknx,
name=_name,
group_address=self.address,
value_type=self.type,
)
self._remove_listener = async_track_state_change_event(
self.hass, [self.entity_id], self._async_entity_changed
)
return device
@callback
def shutdown(self) -> None:
"""Prepare for deletion."""
if self._remove_listener is not None:
self._remove_listener()
self._remove_listener = None
self.device.shutdown()
async def _async_entity_changed(self, event: Event) -> None:
"""Handle entity change."""
new_state = event.data.get("new_state")
if new_state is None:
return
if new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
return
old_state = event.data.get("old_state")
if self.expose_attribute is None:
if old_state is None or old_state.state != new_state.state:
# don't send same value sequentially
await self._async_set_knx_value(new_state.state)
return
new_attribute = new_state.attributes.get(self.expose_attribute)
if old_state is not None:
old_attribute = old_state.attributes.get(self.expose_attribute)
if old_attribute == new_attribute:
# don't send same value sequentially
return
await self._async_set_knx_value(new_attribute)
async def _async_set_knx_value(self, value: StateType) -> None:
"""Set new value on xknx ExposeSensor."""
assert self.device is not None
if value is None:
if self.expose_default is None:
return
value = self.expose_default
if self.type == "binary":
if value == STATE_ON:
value = True
elif value == STATE_OFF:
value = False
await self.device.set(value)
class KNXExposeTime:
"""Object to Expose Time/Date object to KNX bus."""
def __init__(self, xknx: XKNX, expose_type: str, address: str) -> None:
"""Initialize of Expose class."""
self.xknx = xknx
self.expose_type = expose_type
self.address = address
self.device: DateTime = self.async_register()
@callback
def async_register(self) -> DateTime:
"""Register listener."""
return DateTime(
self.xknx,
name=self.expose_type.capitalize(),
broadcast_type=self.expose_type.upper(),
localtime=True,
group_address=self.address,
)
@callback
def shutdown(self) -> None:
"""Prepare for deletion."""
self.device.shutdown()
|
Python
| 0
|
@@ -4254,34 +4254,38 @@
if value
-==
+in (1,
STATE_ON:%0A
@@ -4277,16 +4277,25 @@
STATE_ON
+, %22True%22)
:%0A
@@ -4352,16 +4352,50 @@
TATE_OFF
+ or value == 0 or value == %22False%22
:%0A
|
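Decoded, the binary coercion now also accepts the integer and string spellings that Home Assistant states can carry:

    if self.type == "binary":
        if value in (1, STATE_ON, "True"):
            value = True
        elif value == STATE_OFF or value == 0 or value == "False":
            value = False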
9711a2208525b200c98997248c358432a26bf7a3
|
improve cleanup_ctx_databases
|
dvhb_hybrid/config.py
|
dvhb_hybrid/config.py
|
import functools
import os
def absdir(directory, base_dir):
if not directory.startswith('/'):
directory = os.path.join(base_dir, directory)
return os.path.normpath(directory)
def dirs(list_dir, base_dir):
result = []
for i in list_dir:
result.append(absdir(i, base_dir))
return result
def convert_to_djangodb(d, name, base_dir='/tmp'):
if d.get('database'):
db = {
k.upper(): v
for k, v in d.items()
if v}
if db.pop('GIS', None):
db['ENGINE'] = 'django.contrib.gis.db.backends.postgis'
else:
db['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
db['NAME'] = db.pop('DATABASE')
# Use same db name for test. Use custom config for tests to separate test and dev dbs.
db['TEST'] = {'NAME': db['NAME']}
else:
return {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(base_dir, name + '.sqlite3'),
}
return db
def db_to_settings(db_dict, base_dir):
return {
n: convert_to_djangodb(v, n, base_dir=base_dir)
for n, v in db_dict.items()
}
def convert_to_django_redis(config):
return {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://{host}:{port}/{db}'.format(**config),
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
}
}
def redis_to_settings(redis_dict):
return {name: convert_to_django_redis(value) for name, value in redis_dict.items()}
async def cleanup_ctx_redis(app, cfg_key='default', app_key='redis'):
import aioredis
cfg = app.context.config.redis[cfg_key].connection
pool = await aioredis.create_pool(
(cfg.host, cfg.port),
db=cfg.db,
minsize=cfg.minsize,
maxsize=cfg.maxsize,
loop=app.loop)
app[app_key] = pool
yield
pool.close()
await pool.wait_closed()
cleanup_ctx_redis_sessions = functools.partial(
cleanup_ctx_redis, app_key='sessions', cfg_key='sessions')
async def cleanup_ctx_aiopg(app, cfg_key='default', app_key='db'):
import aiopg.sa
from dvhb_hybrid.amodels import AppModels
dbparams = app.context.config.databases.get(cfg_key)
app.models = app.m = AppModels(app)
async with aiopg.sa.create_engine(dbparams.uri) as pool:
app[app_key] = pool
yield
async def cleanup_ctx_databases(app, cfg_key='default', app_key='db'):
import asyncpgsa
from dvhb_hybrid.amodels import AppModels
dbparams = app.context.config.databases.get(cfg_key)
app.models = app.m = AppModels(app)
async with asyncpgsa.create_pool(**dbparams) as pool:
app[app_key] = pool
yield
|
Python
| 0.000001
|
@@ -10,16 +10,28 @@
nctools%0A
+import json%0A
import o
@@ -2564,104 +2564,486 @@
els%0A
- dbparams = app.context.config.databases.get(cfg_key)%0A app.models = app.m = AppModels(app)
+%0A app.models = app.m = AppModels(app)%0A%0A async def init(connection):%0A for t in %5B'json', 'jsonb'%5D:%0A await connection.set_type_codec(%0A t,%0A encoder=lambda x: x,%0A decoder=json.loads,%0A schema='pg_catalog',%0A )%0A dbparams = app.context.config.databases.get(cfg_key)%0A if 'uri' in dbparams:%0A dbargs, dbkwargs = (dbparams.uri,), %7B%7D%0A else:%0A dbargs, dbkwargs = (), dbparams%0A
%0A
@@ -3081,16 +3081,36 @@
ol(*
-*
db
-p
ar
-am
+gs, init=init, **dbkwarg
s) a
|
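The improvement registers text-passthrough JSON codecs on every pooled connection and accepts either a DSN string or keyword parameters; the rebuilt body, decoded from the delta:

    async def cleanup_ctx_databases(app, cfg_key='default', app_key='db'):
        import asyncpgsa
        from dvhb_hybrid.amodels import AppModels

        app.models = app.m = AppModels(app)

        async def init(connection):
            # decode json/jsonb to Python objects; encode by passing strings through
            for t in ['json', 'jsonb']:
                await connection.set_type_codec(
                    t,
                    encoder=lambda x: x,
                    decoder=json.loads,
                    schema='pg_catalog',
                )

        dbparams = app.context.config.databases.get(cfg_key)
        if 'uri' in dbparams:
            dbargs, dbkwargs = (dbparams.uri,), {}
        else:
            dbargs, dbkwargs = (), dbparams

        async with asyncpgsa.create_pool(*dbargs, init=init, **dbkwargs) as pool:
            app[app_key] = pool
            yield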
3ff373ed0d5349087a77b2a96af41e0e5cc9c15d
|
add UI for boardd loopback test
|
selfdrive/boardd/tests/test_boardd_loopback.py
|
selfdrive/boardd/tests/test_boardd_loopback.py
|
#!/usr/bin/env python3
import os
import random
import time
from collections import defaultdict
from functools import wraps
import cereal.messaging as messaging
from cereal import car
from common.basedir import PARAMS
from common.params import Params
from panda import Panda
from selfdrive.boardd.boardd import can_list_to_can_capnp
from selfdrive.car import make_can_msg
from selfdrive.test.helpers import with_processes
def reset_panda(fn):
@wraps(fn)
def wrapper():
p = Panda()
for i in [0, 1, 2, 0xFFFF]:
p.can_clear(i)
p.reset()
p.close()
fn()
return wrapper
os.environ['STARTED'] = '1'
os.environ['BOARDD_LOOPBACK'] = '1'
os.environ['PARAMS_PATH'] = PARAMS
@reset_panda
@with_processes(['boardd'])
def test_boardd_loopback():
# wait for boardd to init
time.sleep(2)
# boardd blocks on CarVin and CarParams
cp = car.CarParams.new_message()
cp.safetyModel = car.CarParams.SafetyModel.allOutput
Params().put("CarVin", b"0"*17)
Params().put("CarParams", cp.to_bytes())
sendcan = messaging.pub_sock('sendcan')
can = messaging.sub_sock('can', conflate=False, timeout=100)
time.sleep(1)
for i in range(1000):
sent_msgs = defaultdict(set)
for _ in range(random.randrange(10)):
to_send = []
for __ in range(random.randrange(100)):
bus = random.randrange(3)
addr = random.randrange(1, 1<<29)
dat = bytes([random.getrandbits(8) for _ in range(random.randrange(1, 9))])
sent_msgs[bus].add((addr, dat))
to_send.append(make_can_msg(addr, dat, bus))
sendcan.send(can_list_to_can_capnp(to_send, msgtype='sendcan'))
max_recv = 10
while max_recv > 0 and any(len(sent_msgs[bus]) for bus in range(3)):
recvd = messaging.drain_sock(can, wait_for_one=True)
for msg in recvd:
for m in msg.can:
if m.src >= 128:
k = (m.address, m.dat)
assert k in sent_msgs[m.src-128]
sent_msgs[m.src-128].discard(k)
max_recv -= 1
# if a set isn't empty, messages got dropped
for bus in range(3):
assert not len(sent_msgs[bus]), f"loop {i}: bus {bus} missing {len(sent_msgs[bus])} messages"
|
Python
| 0
|
@@ -244,16 +244,51 @@
Params%0A
+from common.spinner import Spinner%0A
from pan
@@ -303,16 +303,16 @@
t Panda%0A
-
from sel
@@ -796,16 +796,17 @@
ack():%0A%0A
+%0A
# wait
@@ -825,16 +825,38 @@
to init%0A
+ spinner = Spinner()%0A
time.s
@@ -1197,16 +1197,27 @@
eep(1)%0A%0A
+ n = 1000%0A
for i
@@ -1229,14 +1229,59 @@
nge(
-1000):
+n):%0A spinner.update(f%22boardd loopback %7Bi%7D/%7Bn%7D%22)%0A
%0A
@@ -2187,16 +2187,16 @@
nge(3):%0A
-
as
@@ -2287,8 +2287,27 @@
ssages%22%0A
+%0A spinner.close()%0A
|
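Decoded, this diff threads openpilot's Spinner progress UI through the loopback test so long runs show progress. A sketch of the changed region only; the elided parts are the record's unchanged CAN send/receive logic:

from common.spinner import Spinner

@reset_panda
@with_processes(['boardd'])
def test_boardd_loopback():
    # wait for boardd to init
    spinner = Spinner()
    time.sleep(2)
    # ... CarVin/CarParams setup and socket creation as in the record ...
    n = 1000
    for i in range(n):
        spinner.update(f"boardd loopback {i}/{n}")
        # ... send random CAN messages and assert each one loops back ...
    spinner.close()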
e6371d63c15974b0aadc0f141b4a64bdd3b89905
|
Update isamruntime.py
|
ibmsecurity/isam/aac/server_connections/isamruntime.py
|
ibmsecurity/isam/aac/server_connections/isamruntime.py
|
import logging
from ibmsecurity.utilities import tools
logger = logging.getLogger(__name__)
uri = "/mga/server_connections/isamruntime"
requires_modules = ["mga", "federation"]
requires_version = "9.0.5.0"
def get_all(isamAppliance, check_mode=False, force=False):
"""
Retrieving a list of all ISAM Runtime server connections
"""
return isamAppliance.invoke_get("Retrieving a list of all ISAM Runtime server connections",
"{0}/v1".format(uri),
requires_modules=requires_modules, requires_version=requires_version)
def get(isamAppliance, name, check_mode=False, force=False):
"""
Retrieving an ISAM Runtime server connection
"""
ret_obj = search(isamAppliance, name=name)
id = ret_obj['data']
if id == {}:
return isamAppliance.create_return_object()
else:
return isamAppliance.invoke_get("Retrieving an ISAM Runtime server connection",
"{0}/{1}/v1".format(uri, id),
requires_modules=requires_modules, requires_version=requires_version)
def add(isamAppliance, name, connection, description="", locked=False, check_mode=False, force=False):
"""
Creating an ISAM Runtime server connection
"""
if force is True or _check_exists(isamAppliance, name=name) is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_post(
"Creating an ISAM Runtime server connection",
"{0}/v1".format(uri),
{
"locked": locked,
"description": description,
"type": "isamruntime",
"connection": connection,
"name": name
},
requires_modules=requires_modules, requires_version=requires_version
)
return isamAppliance.create_return_object()
def update(isamAppliance, name, connection, locked=False, description='', new_name=None, ignore_password_for_idempotency=False, check_mode=False, force=False):
"""
Modifying an ISAM Runtime server connection
Use new_name to rename the connection.
"""
ret_obj = get(isamAppliance, name)
warnings = ret_obj["warnings"]
if ret_obj["data"] == {}:
warnings.append("ISAM Runtime server connection {0} not found, skipping update.".format(name))
return isamAppliance.create_return_object(warnings=warnings)
else:
id = ret_obj["data"]["uuid"]
needs_update = False
json_data = _create_json(name=name, description=description, locked=locked, connection=connection)
if new_name is not None: # Rename condition
json_data['name'] = new_name
if force is not True:
if 'uuid' in ret_obj['data']:
del ret_obj['data']['uuid']
if ignore_password_for_idempotency:
if 'bindPwd' in connection:
warnings.append("Request made to ignore bindPwd for idempotency check.")
connection.pop('bindPwd', None)
sorted_ret_obj = tools.json_sort(ret_obj['data'])
sorted_json_data = tools.json_sort(json_data)
logger.debug("Sorted Existing Data:{0}".format(sorted_ret_obj))
logger.debug("Sorted Desired Data:{0}".format(sorted_json_data))
if sorted_ret_obj != sorted_json_data:
needs_update = True
if force is True or needs_update is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_put(
"Modifying an ISAM Runtime server connection",
"{0}/{1}/v1".format(uri, id),
json_data,
requires_modules=requires_modules, requires_version=requires_version
)
return isamAppliance.create_return_object(warnings=warnings)
def set(isamAppliance, name, locked=False, connection=None, description=None, new_name=None, ignore_password_for_idempotency=False, check_mode=False, force=False):
"""
Creating or Modifying a isamruntime
"""
ret_obj = search(isamAppliance, name=name)
id = ret_obj['data']
if id == {}:
# If no uuid was found, Force the add
return add(isamAppliance, name=name, locked=locked, connection=connection, description=description,
check_mode=check_mode, force=True)
else:
# Update isamruntime
return update(isamAppliance, name=name, locked=locked, connection=connection, description=description,
new_name=new_name, ignore_password_for_idempotency=ignore_password_for_idempotency, check_mode=check_mode, force=force)
def delete(isamAppliance, name, check_mode=False, force=False):
"""
Deleting an ISAM Runtime server connection
"""
ret_obj = search(isamAppliance, name, check_mode=False, force=False)
id = ret_obj['data']
if force is True or id != {}:
if check_mode is True:
return isamAppliance.create_return_object(changed=True)
else:
return isamAppliance.invoke_delete(
"Deleting an ISAM Runtime server connection",
"{0}/{1}/v1".format(uri, id),
requires_modules=requires_modules, requires_version=requires_version
)
return isamAppliance.create_return_object()
def compare(isamAppliance1, isamAppliance2):
"""
Compare isamruntime between two appliances
"""
ret_obj1 = get_all(isamAppliance1)
ret_obj2 = get_all(isamAppliance2)
for obj in ret_obj1['data']:
del obj['uuid']
for obj in ret_obj2['data']:
del obj['uuid']
return tools.json_compare(ret_obj1, ret_obj2, deleted_keys=['uuid'])
def search(isamAppliance, name, force=False, check_mode=False):
"""
Retrieve ID for isamruntime
"""
ret_obj = get_all(isamAppliance)
ret_obj_new = isamAppliance.create_return_object()
for obj in ret_obj['data']:
if obj['name'] == name:
logger.info("Found isamruntime '{0}' uuid: '{1}'".format(name, obj['uuid']))
ret_obj_new['data'] = obj['uuid']
return ret_obj_new
def _check_exists(isamAppliance, name=None, id=None):
"""
Check if ISAM runtime Connection already exists
"""
ret_obj = get_all(isamAppliance)
for obj in ret_obj['data']:
if (name is not None and obj['name'] == name) or (id is not None and obj['uuid'] == id):
return True
return False
def _create_json(name, connection, description, locked):
"""
Create a JSON to be used for the REST API call
"""
json = {
"connection": connection,
"type": "isamruntime",
"name": name,
"description": description,
"locked": locked
}
return json
|
Python
| 0.000002
|
@@ -2256,28 +2256,24 @@
connection%0A
-
%0A Use new
@@ -2304,20 +2304,16 @@
ection.%0A
-
%0A %22%22%22
@@ -4976,20 +4976,16 @@
nection%0A
-
%0A %22%22%22
|
896a90f7fa3c28f9232ebe701e776bfc7785292d
|
Fix inventory update xml data
|
product.py
|
product.py
|
# -*- coding: UTF-8 -*-
'''
product
'''
from decimal import Decimal
from lxml import etree
from lxml.builder import E
from collections import defaultdict
from trytond.model import fields
from trytond.transaction import Transaction
from trytond.pool import PoolMeta, Pool
from trytond.pyson import Eval
__all__ = [
'Product', 'ProductCode', 'Template', 'ProductSaleChannelListing',
]
__metaclass__ = PoolMeta
class Template:
"Product Template"
__name__ = 'product.template'
export_to_amazon = fields.Boolean('Amazon Exportable')
class Product:
"Product"
__name__ = "product.product"
asin = fields.Function(fields.Many2One(
'product.product.code', 'ASIN'
), 'get_codes')
ean = fields.Function(fields.Many2One(
'product.product.code', 'EAN'
), 'get_codes')
upc = fields.Function(fields.Many2One(
'product.product.code', 'UPC'
), 'get_codes')
isbn = fields.Function(fields.Many2One(
'product.product.code', 'ISBN'
), 'get_codes')
gtin = fields.Function(fields.Many2One(
'product.product.code', 'GTIN'
), 'get_codes')
@classmethod
def get_codes(cls, products, names):
ProductCode = Pool().get('product.product.code')
res = {}
for name in names:
res[name] = {}
for product in products:
code = ProductCode.search([
('product', '=', product.id),
('code_type', '=', name)
])
res[name][product.id] = code and code[0].id or None
return res
@classmethod
def extract_product_values_from_amazon_data(cls, product_attributes):
"""
Extract product values from the amazon data, used for
creation of product. This method can be overwritten by
custom modules to store extra info to a product
:param product_data: Product data from amazon
:returns: Dictionary of values
"""
SaleChannel = Pool().get('sale.channel')
amazon_channel = SaleChannel(
Transaction().context['current_channel']
)
assert amazon_channel.source == 'amazon_mws'
return {
'name': product_attributes['Title']['value'],
'default_uom': amazon_channel.default_uom.id,
'salable': True,
'sale_uom': amazon_channel.default_uom.id,
}
@classmethod
def create_from(cls, channel, product_data):
"""
Create the product for the channel
"""
if channel.source != 'amazon_mws':
return super(Product, cls).create_from(channel, product_data)
return cls.create_using_amazon_data(product_data)
@classmethod
def create_using_amazon_data(cls, product_data):
"""
Create a new product with the `product_data` from amazon.
:param product_data: Product Data from Amazon
:returns: Active record of product created
"""
Template = Pool().get('product.template')
Currency = Pool().get('currency.currency')
SaleChannel = Pool().get('sale.channel')
# TODO: Handle attribute sets in multiple languages
product_attribute_set = product_data['Products']['Product'][
'AttributeSets'
]
if isinstance(product_attribute_set, dict):
product_attributes = product_attribute_set['ItemAttributes']
else:
product_attributes = product_attribute_set[0]['ItemAttributes']
product_values = cls.extract_product_values_from_amazon_data(
product_attributes
)
amazon_channel = SaleChannel(
Transaction().context['current_channel']
)
assert amazon_channel.source == 'amazon_mws'
list_price = Decimal('0.01')
if product_attributes.get('ListPrice'):
list_price = product_attributes['ListPrice']['Amount']['value']
currency_code = product_attributes['ListPrice']['CurrencyCode']['value'] # noqa
currency, = Currency.search([
('code', '=', currency_code),
], limit=1)
list_price = Currency.compute(
currency, Decimal(list_price),
amazon_channel.company.currency
)
product_values.update({
'products': [('create', [{
'code': product_data['Id']['value'],
'list_price': list_price,
'cost_price': list_price,
'description': product_attributes['Title']['value'],
}])],
})
product_template, = Template.create([product_values])
return product_template.products[0]
class ProductCode:
"Amazon Product Identifier"
__name__ = 'product.product.code'
@classmethod
def __setup__(cls):
"""
Setup the class before adding to pool
"""
super(ProductCode, cls).__setup__()
cls.code_type.selection.extend([
('upc', 'UPC'),
('isbn', 'ISBN'),
('asin', 'ASIN'),
('gtin', 'GTIN')
])
class ProductSaleChannelListing:
"Product Sale Channel"
__name__ = 'product.product.channel_listing'
asin = fields.Char('ASIN', states={
'required': Eval('channel_source') == 'amazon_mws',
'invisible': Eval('channel_source') != 'amazon_mws',
}, depends=['channel_source'])
fba_code = fields.Char('Code (Fulfilled By Amazon', states={
'invisible': Eval('channel_source') == 'amazon_mws',
}, depends=['channel_source'])
def export_inventory(self):
"""
Export inventory of this listing to external channel
"""
if self.channel.source != 'amazon_mws':
return super(ProductSaleChannelListing, self).export_inventory()
self.export_bulk_inventory([self])
@classmethod
def export_bulk_inventory(cls, listings):
"""
bulk export inventory to amazon
"""
if not listings:
# Nothing to update
return
non_amazon_listings = cls.search([
('id', 'in', map(int, listings)),
('channel.source', '!=', 'amazon_mws'),
])
if non_amazon_listings:
super(ProductSaleChannelListing, cls).export_bulk_inventory(
non_amazon_listings
)
amazon_listings = filter(
lambda l: l not in non_amazon_listings, listings
)
inventory_channel_map = defaultdict(list)
for listing in amazon_listings:
product = listing.product
channel = listing.channel
# group inventory xml by channel
inventory_channel_map[channel].append(E.Message(
E.MessageID(str(product.id)),
E.OperationType('Update'),
E.Inventory(
E.SKU(listing.product_identifier),
E.Quantity(str(round(listing.quantity))),
E.FulfillmentLatency(
str(product.delivery_time)
),
)
))
for channel, elements in inventory_channel_map.iteritems():
envelope_xml = channel._get_amazon_envelop('Inventory', elements)
feeds_api = channel.get_amazon_feed_api()
feeds_api.submit_feed(
etree.tostring(envelope_xml),
feed_type='_POST_INVENTORY_AVAILABILITY_DATA_',
marketplaceids=[channel.amazon_marketplace_id]
)
|
Python
| 0
|
@@ -6996,20 +6996,23 @@
uantity(
-str(
+%22%25d%22 %25
round(li
@@ -7027,17 +7027,16 @@
antity))
-)
,%0A
@@ -7095,19 +7095,26 @@
-str
+%22%25d%22 %25 max
(product
@@ -7127,16 +7127,19 @@
ery_time
+, 1
)%0A
|
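Decoded, the fix formats the two integer XML fields with "%d" and clamps the fulfillment latency to at least 1. This module is Python 2 (it uses iteritems), where str(round(x)) yields text like '5.0', which is not a valid XML integer — apparently the bug being fixed. The message construction after the change, reconstructed from the hunks:

inventory_channel_map[channel].append(E.Message(
    E.MessageID(str(product.id)),
    E.OperationType('Update'),
    E.Inventory(
        E.SKU(listing.product_identifier),
        # "%d" truncates the float that Python 2 round() returns
        E.Quantity("%d" % round(listing.quantity)),
        # never send zero latency; clamp to at least 1
        E.FulfillmentLatency("%d" % max(product.delivery_time, 1)),
    )
))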
ca27dac40aa48a0df073433c489b73f361e34739
|
fix tabs
|
sfa/methods/register.py
|
sfa/methods/register.py
|
### $Id$
### $URL$
from sfa.trust.certificate import Keypair, convert_public_key
from sfa.trust.gid import *
from sfa.util.faults import *
from sfa.util.misc import *
from sfa.util.method import Method
from sfa.util.parameter import Parameter, Mixed
from sfa.util.record import GeniRecord
from sfa.util.debug import log
from sfa.trust.auth import Auth
from sfa.trust.gid import create_uuid
class register(Method):
"""
Register an object with the registry. In addition to being stored in the
Geni database, the appropriate records will also be created in the
PLC databases
@param cred credential string
@param record_dict dictionary containing record fields
@return gid string representation
"""
interfaces = ['registry']
accepts = [
Parameter(str, "Credential string"),
Parameter(dict, "Record dictionary containing record fields")
]
returns = Parameter(int, "String representation of gid object")
def call(self, cred, record_dict):
self.api.auth.check(cred, "register")
record = GeniRecord(dict = record_dict)
type = record.get_type()
name = record.get_name()
self.api.auth.verify_object_permission(name)
auth_name = self.api.auth.get_authority(name)
auth_info = self.api.auth.get_auth_info(auth_name)
table = self.api.auth.get_auth_table(auth_name)
# make sure record has a gid
if 'gid' not in record:
uuid = create_uuid()
pkey = Keypair(create=True)
if 'key' in record and record['key']:
pkey = convert_public_key(record['key'])
gid_object = self.api.auth.hierarchy.create_gid(name, uuid, pkey)
gid = gid_object.save_to_string(save_parents=True)
record['gid'] = gid
record.set_gid(gid)
# check if record already exists
existing_records = table.resolve(type, name)
if existing_records:
raise ExistingRecord(name)
if type in ["authority", "sa", "ma"]:
# update the tree
if not self.api.auth.hierarchy.auth_exists(name):
self.api.auth.hierarchy.create_auth(name)
# authorities are special since they are managed by the registry
# rather than by the caller. We create our own GID for the
# authority rather than relying on the caller to supply one.
# get the GID from the newly created authority
child_auth_info = self.api.auth.get_auth_info(name)
gid = auth_info.get_gid_object()
record.set_gid(gid.save_to_string(save_parents=True))
# if registering a sa, see if a ma already exists
# if registering a ma, see if a sa already exists
if type in ["authority", "sa", "ma"]:
other_rec = table.resolve("authority", record.get_name())
if other_rec:
print >> log, "linking ma and sa to the same plc site"
pointer = other_rec[0].get_pointer()
else:
pl_record = self.api.geni_fields_to_pl_fields(type, name, record)
print >> log, "adding site with fields", pl_record
pointer = self.api.plshell.AddSite(self.api.plauth, pl_record)
record.set_pointer(pointer)
elif (type == "slice"):
pl_record = self.api.geni_fields_to_pl_fields(type, name, record)
pointer = self.api.plshell.AddSlice(self.api.plauth, pl_record)
record.set_pointer(pointer)
elif (type == "user"):
pointer = self.api.plshell.AddPerson(self.api.plauth, dict(record))
if 'enabled' in record and record['enabled']:
self.api.plshell.UpdatePerson(self.api.plauth, pointer, {'enabled': record['enabled']})
login_base = get_leaf(auth_info.hrn)
self.api.plshell.AddPersonToSite(self.api.plauth, pointer, login_base)
# What roles should this user have?
self.api.plshell.AddRoleToPerson(self.api.plauth, 'user', pointer)
record.set_pointer(pointer)
# Add the user's key
if record['key']:
self.api.plshell.AddPersonKey(self.api.plauth, pointer, {'key_type' : 'ssh', 'key' : record['key']})
elif (type == "node"):
pl_record = self.api.geni_fields_to_pl_fields(type, name, record)
login_base = hrn_to_pl_login_base(auth_name)
pointer = self.api.plshell.AddNode(self.api.plauth, login_base, pl_record)
record.set_pointer(pointer)
else:
raise UnknownGeniType(type)
# SFA upcalls may exist in PLCAPI and they could have already added the
# record for us. Lets check if the record already exists
existing_records = table.resolve(type, name)
if not existing_records:
table.insert(record)
# update membership for researchers, pis, owners, operators
self.api.update_membership(None, record)
return record.get_gid_object().save_to_string(save_parents=True)
|
Python
| 0.000001
|
@@ -4246,25 +4246,59 @@
if
-record%5B'key'%5D:%0A%09%09
+'key' in record and record%5B'key'%5D:%0A
self
|
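Besides converting the stray tabs to spaces, the hunk guards the key lookup so records without a 'key' field no longer raise KeyError. Decoded:

# Add the user's key only if the record carries one
if 'key' in record and record['key']:
    self.api.plshell.AddPersonKey(self.api.plauth, pointer,
                                  {'key_type': 'ssh', 'key': record['key']})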
4019372609565b074a5c3ba946245b61c8479ada
|
update dev version after 2.1.0 tag [skip ci]
|
py/fiberassign/_version.py
|
py/fiberassign/_version.py
|
__version__ = '2.1.0'
|
Python
| 0
|
@@ -13,10 +13,18 @@
= '2.1.0
+.dev2650
'%0A
|
e5eaed4ef7d5a00958f2ee8a21c66078a065dec7
|
Upgrade rkt to v1.29.0 for bug fix #3750
|
py/ops/ops/onboard/deps.py
|
py/ops/ops/onboard/deps.py
|
__all__ = [
'deps',
]
import collections
import logging
import urllib.parse
from pathlib import Path
from tempfile import TemporaryDirectory
from garage import apps
from garage import scripts
LOG = logging.getLogger(__name__)
# TODO: Use typing.NamedTuple and the new annotation syntax when all
# systems are upgraded to Python 3.6
Package = collections.namedtuple('Package', [
'name',
# TODO: Support multiple versions
'version',
'uri',
'checksum',
'strip_components',
'install',
])
PACKAGES = {}
def define_package(**kwargs):
def decorate(install):
package = Package(name=install.__name__, install=install, **kwargs)
PACKAGES[package.name] = package
return install
return decorate
@define_package(
version='1.25.0',
uri='https://github.com/coreos/rkt/releases/download/v1.25.0/rkt-v1.25.0.tar.gz',
checksum='sha512-6a65f51af793df4fe054dd1a8f791bcf2e30c6a15593b908515a6616835490cad03d9d927b1c88dd38b77647a9a5f9e40ffba913b92e5c2d6f141a758e0805d8',
strip_components=1,
)
def rkt(package):
if Path('/usr/bin/rkt').exists():
LOG.warning('attempt to overwrite /usr/bin/rkt')
cmds = [
# Don't install api and metadata service for now.
'cp init/systemd/rkt-gc.service /lib/systemd/system'.split(),
'cp init/systemd/rkt-gc.timer /lib/systemd/system'.split(),
'cp init/systemd/tmpfiles.d/rkt.conf /usr/lib/tmpfiles.d'.split(),
'./scripts/setup-data-dir.sh'.split(),
# Install rkt only if everything above succeeds.
'cp rkt /usr/bin'.split(),
# Fetch stage 1.
['rkt', 'trust',
'--prefix', 'coreos.com/rkt/stage1-coreos',
'--skip-fingerprint-review'],
['rkt', 'fetch', 'coreos.com/rkt/stage1-coreos:' + package.version],
]
with scripts.using_sudo():
for cmd in cmds:
scripts.execute(cmd)
scripts.systemctl_enable('rkt-gc.timer')
scripts.systemctl_start('rkt-gc.timer')
@apps.with_prog('list')
@apps.with_help('list supported external packages')
def list_(_):
"""List supported external packages."""
for package_name in sorted(PACKAGES):
package = PACKAGES[package_name]
print('%s:%s' % (package_name, package.version))
return 0
@apps.with_help('install external package')
@apps.with_argument(
'--tarball', metavar='PATH',
help='use local tarball instead',
)
@apps.with_argument(
'package',
help='choose package (format: "name:version")',
)
def install(args):
"""Install external package."""
package_name, package_version = args.package.split(':', maxsplit=1)
package = PACKAGES.get(package_name)
if package is None:
raise RuntimeError('unknown package: %s' % args.package)
if package_version != 'latest' and package_version != package.version:
raise RuntimeError('unsupported package version: %s' % args.package)
with TemporaryDirectory() as staging_dir:
staging_dir = Path(staging_dir)
if args.tarball:
tarball_path = scripts.ensure_file(Path(args.tarball).resolve())
else:
tarball_path = urllib.parse.urlparse(package.uri).path
tarball_path = staging_dir / Path(tarball_path).name
scripts.wget(package.uri, tarball_path)
scripts.ensure_checksum(tarball_path, package.checksum)
with scripts.directory(staging_dir):
if package.strip_components > 0:
tar_extra_flags = [
'--strip-components', package.strip_components,
]
else:
tar_extra_flags = []
scripts.tar_extract(tarball_path, tar_extra_flags=tar_extra_flags)
package.install(package)
return 0
@apps.with_help('manage external dependencies')
@apps.with_defaults(no_locking_required=True)
@apps.with_apps(
'operation', 'operation on external dependencies',
list_,
install,
)
def deps(args):
"""\
Manage external dependencies that will not be installed from distro
package manager.
"""
return args.operation(args)
|
Python
| 0
|
@@ -785,17 +785,17 @@
ion='1.2
-5
+9
.0',%0A
@@ -819,22 +819,19 @@
hub.com/
-coreos
+rkt
/rkt/rel
@@ -849,17 +849,17 @@
oad/v1.2
-5
+9
.0/rkt-v
@@ -861,17 +861,17 @@
rkt-v1.2
-5
+9
.0.tar.g
@@ -899,136 +899,136 @@
512-
-6a65f51af793df4fe054dd1a8f791bcf2e30c6a15593b908515a6616835490cad03d9d927b1c88dd38b77647a9a5f9e40ffba913b92e5c2d6f141a758e0805d8
+80d669b97f5739e5d594828ff3d0a02db59dadd179b90a0f192b24fdabf14709d96329c3d09a8b4fc3f3e07874f551f48118e0b62400e53e5df2b8c574e7a841
',%0A
|
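The hunks are pure data substitutions: version 1.25.0 becomes 1.29.0, the download URL moves from the coreos GitHub org to rkt, and the sha512 pin is updated to match the new tarball. The decorator header after the change:

@define_package(
    version='1.29.0',
    uri='https://github.com/rkt/rkt/releases/download/v1.29.0/rkt-v1.29.0.tar.gz',
    checksum='sha512-80d669b97f5739e5d594828ff3d0a02db59dadd179b90a0f192b24fdabf14709d96329c3d09a8b4fc3f3e07874f551f48118e0b62400e53e5df2b8c574e7a841',
    strip_components=1,
)
def rkt(package):
    ...  # install steps unchanged from the record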
9602574af41a9c09edbc84bf77bde3a285d71741
|
use datastore client in example
|
examples/google/radiology/upload_storage_radiology.py
|
examples/google/radiology/upload_storage_radiology.py
|
#!/usr/bin/env python
# RADIOLOGY ---------------------------------------------------
# This is an example script to upload data (images, text, metadata) to
# google cloud storage and datastore. Data MUST be de-identified
import os
# Start google storage client for pmc-stanford
from som.api.google import Client
client = Client(use_bigquery=False, bucket_name='radiology')
collection = client.create_collection(uid='IRB41449')
# Let's load some dummy data from deid
from deid.data import get_dataset
from deid.dicom import get_files
dicom_files = get_files(get_dataset('dicom-cookies'))
# Now de-identify to get clean files
from deid.dicom import get_identifiers, replace_identifiers
ids=get_identifiers(dicom_files)
updated_files = replace_identifiers(dicom_files=dicom_files,
ids=ids)
# Define some metadata for the entity
metadata = { "source_id" : "cookieTumorDatabase",
"id":"cookie-47",
"Modality": "cookie"}
# Upload the dataset
client.upload_dataset(images=updated_files,
collection=collection,
uid=metadata['id'],
entity_metadata=metadata)
# Now try with adding metadata for an image
images_metadata = {
updated_files[0]:
{
"Modality":"cookie",
"Type": "chocolate-chip",
"Width": 350,
"Height": 350
}
}
# And again do the call
client.upload_dataset(images=updated_files,
collection=collection,
uid="cookie-47",
images_metadata=images_metadata)
|
Python
| 0
|
@@ -294,23 +294,52 @@
i.google
- import
+.datastore import DataStoreClient as
Client%0A
@@ -358,28 +358,8 @@
ent(
-use_bigquery=False,
buck
@@ -378,16 +378,47 @@
iology')
+%0A%0A# big_query not developed yet
%0Acollect
|
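Decoded, the example switches from the generic Client to the datastore-specific one and drops the use_bigquery flag, noting that the BigQuery path is not built yet:

# Start google datastore client for pmc-stanford
from som.api.google.datastore import DataStoreClient as Client
client = Client(bucket_name='radiology')

# big_query not developed yet
collection = client.create_collection(uid='IRB41449')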
0e6f1610065a2a3bffed3446205cf3717bd6adaa
|
fix typo
|
py/test/plugin/hookspec.py
|
py/test/plugin/hookspec.py
|
"""
py.test hooks / extension points
"""
# ------------------------------------------------------------------------------
# Command line and configuration hooks
# ------------------------------------------------------------------------------
def pytest_addoption(parser):
""" called before commandline parsing. """
def pytest_configure(config):
""" called after command line options have been parsed.
and all plugins and initial conftest files been loaded.
``config`` provides access to all such configuration values.
"""
def pytest_namespace(config):
""" return dict of name->object to become available at py.test.*"""
def pytest_unconfigure(config):
""" called before test process is exited. """
# ------------------------------------------------------------------------------
# test Session related hooks
# ------------------------------------------------------------------------------
def pytest_sessionstart(session):
""" before session.main() is called. """
def pytest_sessionfinish(session, exitstatus, excrepr=None):
""" whole test run finishes. """
def pytest_deselected(items):
""" repeatedly called for test items deselected by keyword. """
# ------------------------------------------------------------------------------
# collection hooks
# ------------------------------------------------------------------------------
def pytest_collect_directory(path, parent):
""" return Collection node or None for the given path. """
def pytest_collect_file(path, parent):
""" return Collection node or None for the given path. """
def pytest_collectstart(collector):
""" collector starts collecting. """
def pytest_collectreport(rep):
""" collector finished collecting. """
def pytest_make_collect_report(collector):
""" perform a collection and return a collection. """
pytest_make_collect_report.firstresult = True
# XXX rename to item_collected()? meaning in distribution context?
def pytest_itemstart(item, node=None):
""" test item gets collected. """
# ------------------------------------------------------------------------------
# Python test function related hooks
# ------------------------------------------------------------------------------
def pytest_pycollect_makeitem(collector, name, obj):
""" return custom item/collector for a python object in a module, or None. """
pytest_pycollect_makeitem.firstresult = True
def pytest_pyfunc_call(pyfuncitem):
""" perform function call to the with the given function arguments. """
pytest_pyfunc_call.firstresult = True
def pytest_generate_tests(metafunc):
""" generate (multiple) parametrized calls to a test function."""
# ------------------------------------------------------------------------------
# generic runtest related hooks
# ------------------------------------------------------------------------------
def pytest_runtest_setup(item):
""" called before pytest_runtest_call(). """
def pytest_runtest_call(item):
""" execute test item. """
def pytest_runtest_teardown(item):
""" called after pytest_runtest_call(). """
def pytest_runtest_protocol(item):
""" run given test item and return test report. """
pytest_runtest_protocol.firstresult = True
def pytest_runtest_makereport(item, call):
""" make ItemTestReport for the specified test outcome. """
pytest_runtest_makereport.firstresult = True
def pytest_runtest_logreport(rep):
""" process item test report. """
# ------------------------------------------------------------------------------
# generic reporting hooks (invoked from pytest_terminal.py)
# ------------------------------------------------------------------------------
def pytest_report_teststatus(rep):
""" return shortletter and verbose word. """
pytest_report_teststatus.firstresult = True
def pytest_terminal_summary(terminalreporter):
""" add additional section in terminal summary reporting. """
def pytest_report_iteminfo(item):
""" return (fspath, lineno, name) for the item.
the information is used for result display and to sort tests
"""
pytest_report_iteminfo.firstresult = True
# ------------------------------------------------------------------------------
# doctest hooks
# ------------------------------------------------------------------------------
def pytest_doctest_prepare_content(content):
""" return processed content for a given doctest"""
pytest_doctest_prepare_content.firstresult = True
# ------------------------------------------------------------------------------
# misc hooks
# ------------------------------------------------------------------------------
def pytest_plugin_registered(plugin):
""" a new py lib plugin got registered. """
def pytest_plugin_unregistered(plugin):
""" a py lib plugin got unregistered. """
def pytest_internalerror(excrepr):
""" called for internal errors. """
def pytest_trace(category, msg):
""" called for debug info. """
# ------------------------------------------------------------------------------
# distributed testing
# ------------------------------------------------------------------------------
def pytest_testnodeready(node):
""" Test Node is ready to operate. """
def pytest_testnodedown(node, error):
""" Test Node is down. """
def pytest_rescheduleitems(items):
""" reschedule Items from a node that went down. """
def pytest_looponfailinfo(failreports, rootdirs):
""" info for repeating failing tests. """
|
Python
| 0.003968
|
@@ -3343,22 +3343,27 @@
the
-specified test
+given item and call
out
@@ -3367,24 +3367,25 @@
outcome. %22%22%22
+
%0Apytest_runt
|
e5dc3bc75ce83b4069f2626705f878f197a13314
|
Update `tfds new` CLI
|
tensorflow_datasets/scripts/cli/builder_templates.py
|
tensorflow_datasets/scripts/cli/builder_templates.py
|
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Templates to generate dataset builders.
Depending on the given format, it will generate code to add format-specific
dataset builders to tfds.
"""
import textwrap
from tensorflow_datasets.scripts.cli import cli_utils
# Names of dataset builders.
STANDARD = 'standard'
CONLL = 'conll'
def create_builder_template(info: cli_utils.DatasetInfo):
"""Calls the required (possibly format-specific) dataset builder template.
Args:
info: `cli_utils.DatasetInfo` object containing all information about the
dataset necessary to generate a template for the new dataset.
Returns:
A string containing the builder-specific template for the new dataset, to be
filled by the user.
"""
if info.data_format == STANDARD:
return _standard_template(info)
elif info.data_format == CONLL:
return _conll_template(info)
else:
raise ValueError(
f'Required format {info.data_format} isn\'t associated with '
'a format-specific builder in TFDS.')
def _standard_template(info: cli_utils.DatasetInfo) -> str:
"""Returns a template for a `tfds.core.GeneratorBasedBuilder`."""
content = textwrap.dedent(f'''\
"""{info.name} dataset."""
import {info.tfds_api} as tfds
# {info.todo}: Markdown description that will appear on the catalog page.
_DESCRIPTION = """
Description is **formatted** as markdown.
It should also contain any processing which has been applied (if any),
(e.g. corrupted example skipped, images cropped,...):
"""
# {info.todo}: BibTeX citation
_CITATION = """
"""
class {info.cls_name}(tfds.core.GeneratorBasedBuilder):
"""DatasetBuilder for {info.name} dataset."""
VERSION = tfds.core.Version('1.0.0')
RELEASE_NOTES = {{
'1.0.0': 'Initial release.',
}}
def _info(self) -> tfds.core.DatasetInfo:
"""Returns the dataset metadata."""
# {info.todo}: Specifies the tfds.core.DatasetInfo object
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({{
# These are the features of your dataset like images, labels ...
'image': tfds.features.Image(shape=(None, None, 3)),
'label': tfds.features.ClassLabel(names=['no', 'yes']),
}}),
# If there's a common (input, target) tuple from the
# features, specify them here. They'll be used if
# `as_supervised=True` in `builder.as_dataset`.
supervised_keys=('image', 'label'), # Set to `None` to disable
homepage='https://dataset-homepage/',
citation=_CITATION,
)
def _split_generators(self, dl_manager: tfds.download.DownloadManager):
"""Returns SplitGenerators."""
# {info.todo}: Downloads the data and defines the splits
path = dl_manager.download_and_extract('https://todo-data-url')
# {info.todo}: Returns the Dict[split names, Iterator[Key, Example]]
return {{
'train': self._generate_examples(path / 'train_imgs'),
}}
def _generate_examples(self, path):
"""Yields examples."""
# {info.todo}: Yields (key, example) tuples from the dataset
for f in path.glob('*.jpeg'):
yield 'key', {{
'image': f,
'label': 'yes',
}}
''')
return content
def _conll_template(info: cli_utils.DatasetInfo) -> str:
"""A template for ConllDatasetBuilder."""
content = textwrap.dedent(f'''\
"""{info.name} dataset."""
from tensorflow_datasets.core.dataset_builders import conll_dataset_builder_utils as conll_lib
import {info.tfds_api} as tfds
# {info.todo}: Markdown description that will appear on the catalog page.
_DESCRIPTION = """
Description is **formatted** as markdown.
It should also contain any processing which has been applied (if any),
(e.g. corrupted example skipped, images cropped,...):
"""
# {info.todo}: BibTeX citation
_CITATION = """
"""
class {info.cls_name}(tfds.dataset_builders.ConllDatasetBuilder):
"""DatasetBuilder for {info.name} dataset."""
VERSION = tfds.core.Version('1.0.0')
RELEASE_NOTES = {{
'1.0.0': 'Initial release.',
}}
# {info.todo}: Add details about the dataset's features.
BUILDER_CONFIGS = [conll_lib.CONLL_2002_CONFIG]
def _info(self) -> tfds.core.DatasetInfo:
"""Returns the dataset metadata."""
# {info.todo}: Specifies the dataset infos.
return self.create_dataset_info(
description=_DESCRIPTION,
homepage="",
citation=_CITATION,
)
def _split_generators(self, dl_manager: tfds.download.DownloadManager):
"""Returns SplitGenerators."""
# {info.todo}: Downloads the data and defines the splits
path = dl_manager.download_and_extract('https://todo-data-url')
return {{
'train': self._generate_examples(path / 'train.txt',)
}}
# {info.todo}: If you need a customized _generate_examples function,
# comment out the following.
# def _generate_examples(
# self,
# path: Union[epath.PathLike, List[epath.PathLike]],
# features_order: List[str],
# separator: str = "\t") -> split_builder_lib.SplitGenerator:
# """Yields (key, example) examples."""
# pass
''')
return content
|
Python
| 0
|
@@ -4375,16 +4375,22 @@
builders
+.conll
import
@@ -5158,16 +5158,77 @@
atures.%0A
+ # conll_lib contains a set of ready-to-use features.%0A
@@ -6189,108 +6189,9 @@
#
- features_order: List%5Bstr%5D,%0A # separator: str = %22%5Ct%22) -%3E split_builder_lib.SplitGenerator
+)
:%0A
|
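Decoded, the template now imports conll_dataset_builder_utils from its new dataset_builders.conll subpackage, points users at conll_lib's ready-made features, and trims the commented _generate_examples signature down to (self, path). The affected template lines after the change (a reconstruction; the surrounding template text is unchanged):

from tensorflow_datasets.core.dataset_builders.conll import conll_dataset_builder_utils as conll_lib

# ... inside the generated builder class ...
  # {info.todo}: Add details about the dataset's features.
  # conll_lib contains a set of ready-to-use features.
  BUILDER_CONFIGS = [conll_lib.CONLL_2002_CONFIG]

  # {info.todo}: If you need a customized _generate_examples function,
  # comment out the following.
  # def _generate_examples(
  #   self,
  #   path: Union[epath.PathLike, List[epath.PathLike]],
  # ):
  #   """Yields (key, example) examples."""
  #   pass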
951b6b9cc14e323dc97aa6e67dee17ef110e673f
|
check for exclusive try/else and if/else
|
pychecker2/utest/scopes.py
|
pychecker2/utest/scopes.py
|
from pychecker2.TestSupport import WarningTester
from pychecker2 import ScopeChecks
class RedefinedTestCase(WarningTester):
def testScopes(self):
self.warning('def f(): pass\n'
'def f(): pass\n',
1, ScopeChecks.RedefineCheck.redefinedScope, 'f', 2)
self.warning('class C:\n'
' def g(self): pass\n'
' def g(self): pass\n',
2, ScopeChecks.RedefineCheck.redefinedScope, 'g', 3)
self.silent('def s(): pass\n'
'def f(): pass\n')
|
Python
| 0
|
@@ -144,16 +144,69 @@
(self):%0A
+ w = ScopeChecks.RedefineCheck.redefinedScope%0A
@@ -304,48 +304,9 @@
1,
-ScopeChecks.RedefineCheck.redefinedScope
+w
, 'f
@@ -464,56 +464,489 @@
2,
-ScopeChecks.RedefineCheck.redefinedScope, 'g', 3
+w, 'g', 3)%0A self.silent('def s(): pass%5Cn'%0A 'def f(): pass%5Cn')%0A self.silent('import sys%5Cn'%0A 'if sys.argv:%5Cn'%0A ' def f(): return 1%5Cn'%0A 'else:%5Cn'%0A ' def f(): return 0%5Cn')%0A self.warning('import sys%5Cn'%0A 'if sys.argv:%5Cn'%0A ' def f(): return 1%5Cn'%0A ' def f(): return 0%5Cn',%0A 3, w, 'f', 4
)%0A
@@ -960,68 +960,256 @@
elf.
-silent('
+warning('try:%5Cn'%0A '
def
-s
+f
():
-pass%5Cn'%0A 'def f(): pass%5Cn'
+return 1%5Cn'%0A 'except Exception:%5Cn'%0A ' pass%5Cn'%0A 'else:%5Cn'%0A ' def f(): return 0%5Cn',%0A 2, w, 'f', 6
)%0A
|
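Decoded, the test gains a local alias w for the warning and three new cases matching the subject line: definitions split across exclusive if/else branches stay silent, while duplicates in the same branch, or in a try body and its else clause (both of which execute when nothing raises), are flagged:

w = ScopeChecks.RedefineCheck.redefinedScope

# exclusive if/else branches define f once each -- no warning
self.silent('import sys\n'
            'if sys.argv:\n'
            '    def f(): return 1\n'
            'else:\n'
            '    def f(): return 0\n')

# two definitions in the same branch are still a redefinition
self.warning('import sys\n'
             'if sys.argv:\n'
             '    def f(): return 1\n'
             '    def f(): return 0\n',
             3, w, 'f', 4)

# a try body and its else clause both run on success -- flagged
self.warning('try:\n'
             '    def f(): return 1\n'
             'except Exception:\n'
             '    pass\n'
             'else:\n'
             '    def f(): return 0\n',
             2, w, 'f', 6)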
a434050e0f1c9f3e162898a3687cd7de8b77980c
|
Update load.py
|
simphony_mayavi/load.py
|
simphony_mayavi/load.py
|
from mayavi.core.api import registry
from simphony_mayavi.adapt2cuds import adapt2cuds
def load(filename, name=None, kind=None, rename_arrays=None):
""" Load the file data into a CUDS container.
"""
data_set = _read(filename)
return adapt2cuds(
data_set, name, kind, rename_arrays)
def _read(filename):
""" Find a suitable reader and read in the tvtk.Dataset.
"""
metasource = registry.get_file_reader(filename)
if metasource is None:
message = 'No suitable reader found for file: {}'
raise RuntimeError(message.format(filename))
if metasource.factory is None:
source = metasource.get_callable()()
source.initialize(filename)
source.update()
reader = source.reader
else:
message = 'Mayavi reader that requires a scene is not supported : {}'
raise NotImplementedError(message.format(filename))
if len(source.outputs) != 1:
message = 'Only one output is expected from the reader'
raise RuntimeError(message)
return reader.output
|
Python
| 0.000001
|
@@ -1038,17 +1038,16 @@
essage)%0A
-%0A
retu
|