hexsha
stringlengths 40
40
| size
int64 2
1.02M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
245
| max_stars_repo_name
stringlengths 6
130
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
245
| max_issues_repo_name
stringlengths 6
130
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
245
| max_forks_repo_name
stringlengths 6
130
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.02M
| avg_line_length
float64 1
417k
| max_line_length
int64 1
987k
| alphanum_fraction
float64 0
1
| content_no_comment
stringlengths 0
1.01M
| is_comment_constant_removed
bool 1
class | is_sharp_comment_removed
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c47dc256aaf34a46737b9baba2f30bbed33feaf
| 2,184
|
py
|
Python
|
nuagevsdsim/simentities/nusimsapegressqosprofile.py
|
pdellaert/vspk-sim
|
459a84366a9bdde82d74aca18ea866e3d55d62ee
|
[
"BSD-3-Clause"
] | null | null | null |
nuagevsdsim/simentities/nusimsapegressqosprofile.py
|
pdellaert/vspk-sim
|
459a84366a9bdde82d74aca18ea866e3d55d62ee
|
[
"BSD-3-Clause"
] | null | null | null |
nuagevsdsim/simentities/nusimsapegressqosprofile.py
|
pdellaert/vspk-sim
|
459a84366a9bdde82d74aca18ea866e3d55d62ee
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# BSD 3-Clause License
#
# Copyright (c) 2017, Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
NUSimSAPEgressQoSProfile
"""
from vspk import v5_0 as vsdk
from nuagevsdsim.simentities.nusimresource import NUSimResource
class NUSimSAPEgressQoSProfile(NUSimResource):
    """Simulator stand-in for the VSD SAPEgressQoSProfile entity.

    Notes:
        7x50 SAP Egress QoS profile
    """

    # vspk class this simulated resource mirrors.
    __vspk_class__ = vsdk.NUSAPEgressQoSProfile
    # Fields that must be unique across instances.
    __unique_fields__ = ['externalID']
    # No mandatory fields and no defaults for this entity.
    __mandatory_fields__ = []
    __default_fields__ = {}
    # Parents through which this entity can be fetched / created.
    __get_parents__ = ['gateway', 'redundancygroup']
    __create_parents__ = []

    def __init__(self):
        super(NUSimSAPEgressQoSProfile, self).__init__()
| 39.709091
| 80
| 0.755952
|
from vspk import v5_0 as vsdk
from nuagevsdsim.simentities.nusimresource import NUSimResource
class NUSimSAPEgressQoSProfile(NUSimResource):
__vspk_class__ = vsdk.NUSAPEgressQoSProfile
__unique_fields__ = ['externalID']
__mandatory_fields__ = []
__default_fields__ = {
}
__get_parents__ = ['gateway', 'redundancygroup']
__create_parents__ = []
def __init__(self):
super(NUSimSAPEgressQoSProfile, self).__init__()
| true
| true
|
1c47dccf6915887e22c7722e4d7c64ee109f4851
| 10,838
|
py
|
Python
|
CTFd/views.py
|
ws4/TopCTFd
|
3b1e25df1318e86ff163a0b546f6e9b7f8305a5a
|
[
"Apache-2.0"
] | 1
|
2019-06-25T09:24:29.000Z
|
2019-06-25T09:24:29.000Z
|
CTFd/views.py
|
ws4/TopCTFd
|
3b1e25df1318e86ff163a0b546f6e9b7f8305a5a
|
[
"Apache-2.0"
] | null | null | null |
CTFd/views.py
|
ws4/TopCTFd
|
3b1e25df1318e86ff163a0b546f6e9b7f8305a5a
|
[
"Apache-2.0"
] | null | null | null |
import os
import re
from flask import current_app as app, render_template, request, redirect, abort, jsonify, url_for, session, Blueprint, Response, send_file
from flask.helpers import safe_join
from jinja2.exceptions import TemplateNotFound
from passlib.hash import bcrypt_sha256
from CTFd.models import db, Teams, Solves, Awards, Files, Pages
from CTFd.utils import cache, markdown
from CTFd import utils
views = Blueprint('views', __name__)
@views.route('/setup', methods=['GET', 'POST'])
def setup():
    """First-run wizard: create the admin team, the index page and the
    initial configuration keys, then redirect to the front page.

    GET renders the setup form (with a CSRF nonce in the session); POST
    applies it. Once ``utils.is_setup()`` is truthy this route always
    redirects to the static index page.
    """
    if not utils.is_setup():
        if not session.get('nonce'):
            session['nonce'] = utils.sha512(os.urandom(10))
        if request.method == 'POST':
            ctf_name = request.form['ctf_name']
            ctf_name = utils.set_config('ctf_name', ctf_name)

            # Custom CSS, served later by custom_css() from the 'css' key.
            # BUG FIX: this previously wrote the default into the 'start'
            # key (utils.set_config('start', '')), so 'css' was never
            # initialized and /static/user.css served an unset config.
            css = utils.set_config('css', '')

            # Admin user
            name = request.form['name']
            email = request.form['email']
            password = request.form['password']
            admin = Teams(name, email, password)
            admin.admin = True
            admin.banned = True

            # Index page. NOTE(review): the .format() call is a no-op —
            # the template contains no replacement fields; kept for
            # compatibility with the upstream code it was copied from.
            page = Pages('index', """<div class="container main-container">
<img class="logo" src="themes/original/static/img/logo.png" />
<h3 class="text-center" >
<p>A11111111 cool CTF platform from <a href="https://ctfd.io">ctfd.io</a></p>
<p style="">Follow us on social media:</p>
<a href="https://twitter.com/ctfdio"><i class="fa fa-twitter fa-2x" aria-hidden="true"></i></a>
<a href="https://facebook.com/ctfdio"><i class="fa fa-facebook-official fa-2x" aria-hidden="true"></i></a>
<a href="https://github.com/ctfd"><i class="fa fa-github fa-2x" aria-hidden="true"></i></a>
</h3>
<br>
<h4 class="text-center">
<a href="admin">Click here</a> to login and setup your CTF
</h4>
</div>""".format(request.script_root))

            # max attempts per challenge (0 = unlimited)
            max_tries = utils.set_config('max_tries', 0)

            # Start / end / freeze times — all unset initially.
            start = utils.set_config('start', None)
            end = utils.set_config('end', None)
            freeze = utils.set_config('freeze', None)

            # Challenges cannot be viewed by unregistered users
            view_challenges_unregistered = utils.set_config('view_challenges_unregistered', None)

            # Allow/Disallow registration
            prevent_registration = utils.set_config('prevent_registration', None)

            # Email verification and outgoing-mail settings, unset for now.
            verify_emails = utils.set_config('verify_emails', None)
            mail_server = utils.set_config('mail_server', None)
            mail_port = utils.set_config('mail_port', None)
            mail_tls = utils.set_config('mail_tls', None)
            mail_ssl = utils.set_config('mail_ssl', None)
            mail_username = utils.set_config('mail_username', None)
            mail_password = utils.set_config('mail_password', None)
            mail_useauth = utils.set_config('mail_useauth', None)

            # Marks setup as complete so this route becomes a redirect.
            setup = utils.set_config('setup', True)

            db.session.add(page)
            db.session.add(admin)
            db.session.commit()

            # Log the freshly created admin in and rotate the nonce.
            session['username'] = admin.name
            session['id'] = admin.id
            session['admin'] = admin.admin
            session['nonce'] = utils.sha512(os.urandom(10))

            db.session.close()
            app.setup = False
            with app.app_context():
                cache.clear()

            return redirect(url_for('views.static_html'))
        return render_template('setup.html', nonce=session.get('nonce'))
    return redirect(url_for('views.static_html'))
# Custom CSS handler
@views.route('/static/user.css')
def custom_css():
    """Serve the operator-defined stylesheet stored in the 'css' config key."""
    css = utils.get_config('css')
    return Response(css, mimetype='text/css')
# Static HTML files
@views.route("/", defaults={'template': 'index'})
@views.route("/<template>")
def static_html(template):
    """Render ``<template>.html`` from the theme if it exists; otherwise
    fall back to a database-backed page, or 404 when neither is found."""
    try:
        return render_template('{0}.html'.format(template))
    except TemplateNotFound:
        db_page = utils.get_page(template)
        if db_page is None:
            abort(404)
        return render_template('page.html', content=markdown(db_page.html))
@views.route('/teams', defaults={'page': '1'})
@views.route('/teams/<int:page>')
def teams(page):
    """Public team listing, paginated at 50 teams per page."""
    page = abs(int(page))
    results_per_page = 50
    page_start = results_per_page * (page - 1)
    page_end = page_start + results_per_page

    # When email verification is enabled only verified teams are listed;
    # banned teams never appear.
    if utils.get_config('verify_emails'):
        base_query = Teams.query.filter_by(verified=True, banned=False)
    else:
        base_query = Teams.query.filter_by(banned=False)

    count = base_query.count()
    teams = base_query.slice(page_start, page_end).all()

    # Ceiling division: one extra page for any partial remainder.
    pages = count // results_per_page + (1 if count % results_per_page else 0)
    return render_template('teams.html', teams=teams, team_pages=pages, curr_page=page)
@views.route('/team/<int:teamid>', methods=['GET', 'POST'])
def team(teamid):
    """Public team page.

    GET renders the team's profile with its solves and awards; POST
    returns the solve list as JSON. Respects the scoreboard-visibility
    and freeze-time settings: other teams' post-freeze progress is
    hidden, and hidden scores produce an error page instead of data.
    """
    # BUG FIX: the config key had been mangled (apparently by a bad
    # search/replace) into 'view_scoreboard_if_utils.authed'; the real
    # key is 'view_scoreboard_if_authed', so the login requirement was
    # silently never enforced.
    if utils.get_config('view_scoreboard_if_authed') and not utils.authed():
        return redirect(url_for('auth.login', next=request.path))

    errors = []
    freeze = utils.get_config('freeze')
    user = Teams.query.filter_by(id=teamid).first_or_404()
    solves = Solves.query.filter_by(teamid=teamid)
    awards = Awards.query.filter_by(teamid=teamid)

    place = user.place()
    score = user.score()

    if freeze:
        freeze = utils.unix_time_to_utc(freeze)
        # Other teams only see progress made before the freeze; a team
        # always sees its own full history.
        if teamid != session.get('id'):
            solves = solves.filter(Solves.date < freeze)
            awards = awards.filter(Awards.date < freeze)

    solves = solves.all()
    awards = awards.all()

    db.session.close()

    if utils.hide_scores() and teamid != session.get('id'):
        errors.append('Scores are currently hidden')

    if errors:
        return render_template('team.html', team=user, errors=errors)

    if request.method == 'GET':
        return render_template('team.html', solves=solves, awards=awards, team=user, score=score, place=place, score_frozen=utils.is_scoreboard_frozen())
    elif request.method == 'POST':
        json = {'solves': []}
        for x in solves:
            json['solves'].append({'id': x.id, 'chal': x.chalid, 'team': x.teamid})
        return jsonify(json)
@views.route('/profile', methods=['POST', 'GET'])
def profile():
    """Let a logged-in team view (GET) or edit (POST) its own profile.

    POST validates the submitted name/email/password against uniqueness
    and format rules; on any error the form is re-rendered with messages,
    otherwise the team row is updated and the browser is redirected back
    here. Anonymous users are sent to the login page.
    """
    if utils.authed():
        if request.method == "POST":
            errors = []
            name = request.form.get('name')
            email = request.form.get('email')
            website = request.form.get('website')
            affiliation = request.form.get('affiliation')
            country = request.form.get('country')

            user = Teams.query.filter_by(id=session['id']).first()

            # Name checks only matter when name changes are allowed; the
            # later uses of `names`/`name_len` are guarded by the same
            # config flag, so they stay undefined otherwise.
            if not utils.get_config('prevent_name_change'):
                names = Teams.query.filter_by(name=name).first()
                name_len = len(request.form['name']) == 0

            emails = Teams.query.filter_by(email=email).first()
            valid_email = re.match(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", email)

            # Changing the password requires re-entering the current one
            # in the 'confirm' field.
            if ('password' in request.form.keys() and not len(request.form['password']) == 0) and \
                    (not bcrypt_sha256.verify(request.form.get('confirm').strip(), user.password)):
                errors.append("Your old password doesn't match what we have.")
            if not valid_email:
                errors.append("That email doesn't look right")
            if not utils.get_config('prevent_name_change') and names and name != session['username']:
                errors.append('That team name is already taken')
            if emails and emails.id != session['id']:
                errors.append('That email has already been used')
            if not utils.get_config('prevent_name_change') and name_len:
                errors.append('Pick a longer team name')
            if website.strip() and not utils.validate_url(website):
                errors.append("That doesn't look like a valid URL")

            if len(errors) > 0:
                # Re-render the form with the submitted values and errors.
                return render_template('profile.html', name=name, email=email, website=website,
                                       affiliation=affiliation, country=country, errors=errors)
            else:
                team = Teams.query.filter_by(id=session['id']).first()
                if not utils.get_config('prevent_name_change'):
                    team.name = name
                if team.email != email.lower():
                    team.email = email.lower()
                    # A changed address must be re-verified when email
                    # verification is enabled.
                    if utils.get_config('verify_emails'):
                        team.verified = False
                session['username'] = team.name

                if 'password' in request.form.keys() and not len(request.form['password']) == 0:
                    team.password = bcrypt_sha256.encrypt(request.form.get('password'))
                team.website = website
                team.affiliation = affiliation
                team.country = country
                db.session.commit()
                db.session.close()
                return redirect(url_for('views.profile'))
        else:
            # GET: pre-fill the form with the team's current values.
            user = Teams.query.filter_by(id=session['id']).first()
            name = user.name
            email = user.email
            website = user.website
            affiliation = user.affiliation
            country = user.country
            prevent_name_change = utils.get_config('prevent_name_change')
            confirm_email = utils.get_config('verify_emails') and not user.verified
            return render_template('profile.html', name=name, email=email, website=website, affiliation=affiliation,
                                   country=country, prevent_name_change=prevent_name_change, confirm_email=confirm_email)
    else:
        return redirect(url_for('auth.login'))
@views.route('/files', defaults={'path': ''})
@views.route('/files/<path:path>')
def file_handler(path):
    """Serve an uploaded file by its stored location.

    Challenge files are gated on CTF timing: outside the live window
    they are only available to admins, unless post-CTF viewing is
    enabled and the CTF has actually started.
    """
    f = Files.query.filter_by(location=path).first_or_404()
    if f.chal and not utils.is_admin() and not utils.ctftime():
        # Outside the live window: allow only when post-CTF viewing is
        # on and the CTF has started; otherwise forbid.
        if not (utils.view_after_ctf() and utils.ctf_started()):
            abort(403)
    upload_folder = os.path.join(app.root_path, app.config['UPLOAD_FOLDER'])
    return send_file(safe_join(upload_folder, f.location))
@views.route('/themes/<theme>/static/<path:path>')
def themes_handler(theme, path):
    """Serve a static asset from the given theme's directory.

    safe_join prevents path traversal; anything that is not an existing
    regular file yields a 404.
    """
    filename = safe_join(app.root_path, 'themes', theme, 'static', path)
    if not os.path.isfile(filename):
        abort(404)
    return send_file(filename)
| 40.140741
| 153
| 0.605462
|
import os
import re
from flask import current_app as app, render_template, request, redirect, abort, jsonify, url_for, session, Blueprint, Response, send_file
from flask.helpers import safe_join
from jinja2.exceptions import TemplateNotFound
from passlib.hash import bcrypt_sha256
from CTFd.models import db, Teams, Solves, Awards, Files, Pages
from CTFd.utils import cache, markdown
from CTFd import utils
views = Blueprint('views', __name__)
@views.route('/setup', methods=['GET', 'POST'])
def setup():
if not utils.is_setup():
if not session.get('nonce'):
session['nonce'] = utils.sha512(os.urandom(10))
if request.method == 'POST':
ctf_name = request.form['ctf_name']
ctf_name = utils.set_config('ctf_name', ctf_name)
css = utils.set_config('start', '')
name = request.form['name']
email = request.form['email']
password = request.form['password']
admin = Teams(name, email, password)
admin.admin = True
admin.banned = True
page = Pages('index', """<div class="container main-container">
<img class="logo" src="themes/original/static/img/logo.png" />
<h3 class="text-center" >
<p>A11111111 cool CTF platform from <a href="https://ctfd.io">ctfd.io</a></p>
<p style="">Follow us on social media:</p>
<a href="https://twitter.com/ctfdio"><i class="fa fa-twitter fa-2x" aria-hidden="true"></i></a>
<a href="https://facebook.com/ctfdio"><i class="fa fa-facebook-official fa-2x" aria-hidden="true"></i></a>
<a href="https://github.com/ctfd"><i class="fa fa-github fa-2x" aria-hidden="true"></i></a>
</h3>
<br>
<h4 class="text-center">
<a href="admin">Click here</a> to login and setup your CTF
</h4>
</div>""".format(request.script_root))
max_tries = utils.set_config('max_tries', 0)
start = utils.set_config('start', None)
end = utils.set_config('end', None)
freeze = utils.set_config('freeze', None)
view_challenges_unregistered = utils.set_config('view_challenges_unregistered', None)
prevent_registration = utils.set_config('prevent_registration', None)
verify_emails = utils.set_config('verify_emails', None)
mail_server = utils.set_config('mail_server', None)
mail_port = utils.set_config('mail_port', None)
mail_tls = utils.set_config('mail_tls', None)
mail_ssl = utils.set_config('mail_ssl', None)
mail_username = utils.set_config('mail_username', None)
mail_password = utils.set_config('mail_password', None)
mail_useauth = utils.set_config('mail_useauth', None)
setup = utils.set_config('setup', True)
db.session.add(page)
db.session.add(admin)
db.session.commit()
session['username'] = admin.name
session['id'] = admin.id
session['admin'] = admin.admin
session['nonce'] = utils.sha512(os.urandom(10))
db.session.close()
app.setup = False
with app.app_context():
cache.clear()
return redirect(url_for('views.static_html'))
return render_template('setup.html', nonce=session.get('nonce'))
return redirect(url_for('views.static_html'))
@views.route('/static/user.css')
def custom_css():
return Response(utils.get_config('css'), mimetype='text/css')
@views.route("/", defaults={'template': 'index'})
@views.route("/<template>")
def static_html(template):
try:
return render_template('%s.html' % template)
except TemplateNotFound:
page = utils.get_page(template)
if page is None:
abort(404)
return render_template('page.html', content=markdown(page.html))
@views.route('/teams', defaults={'page': '1'})
@views.route('/teams/<int:page>')
def teams(page):
page = abs(int(page))
results_per_page = 50
page_start = results_per_page * (page - 1)
page_end = results_per_page * (page - 1) + results_per_page
if utils.get_config('verify_emails'):
count = Teams.query.filter_by(verified=True, banned=False).count()
teams = Teams.query.filter_by(verified=True, banned=False).slice(page_start, page_end).all()
else:
count = Teams.query.filter_by(banned=False).count()
teams = Teams.query.filter_by(banned=False).slice(page_start, page_end).all()
pages = int(count / results_per_page) + (count % results_per_page > 0)
return render_template('teams.html', teams=teams, team_pages=pages, curr_page=page)
@views.route('/team/<int:teamid>', methods=['GET', 'POST'])
def team(teamid):
if utils.get_config('view_scoreboard_if_utils.authed') and not utils.authed():
return redirect(url_for('auth.login', next=request.path))
errors = []
freeze = utils.get_config('freeze')
user = Teams.query.filter_by(id=teamid).first_or_404()
solves = Solves.query.filter_by(teamid=teamid)
awards = Awards.query.filter_by(teamid=teamid)
place = user.place()
score = user.score()
if freeze:
freeze = utils.unix_time_to_utc(freeze)
if teamid != session.get('id'):
solves = solves.filter(Solves.date < freeze)
awards = awards.filter(Awards.date < freeze)
solves = solves.all()
awards = awards.all()
db.session.close()
if utils.hide_scores() and teamid != session.get('id'):
errors.append('Scores are currently hidden')
if errors:
return render_template('team.html', team=user, errors=errors)
if request.method == 'GET':
return render_template('team.html', solves=solves, awards=awards, team=user, score=score, place=place, score_frozen=utils.is_scoreboard_frozen())
elif request.method == 'POST':
json = {'solves': []}
for x in solves:
json['solves'].append({'id': x.id, 'chal': x.chalid, 'team': x.teamid})
return jsonify(json)
@views.route('/profile', methods=['POST', 'GET'])
def profile():
if utils.authed():
if request.method == "POST":
errors = []
name = request.form.get('name')
email = request.form.get('email')
website = request.form.get('website')
affiliation = request.form.get('affiliation')
country = request.form.get('country')
user = Teams.query.filter_by(id=session['id']).first()
if not utils.get_config('prevent_name_change'):
names = Teams.query.filter_by(name=name).first()
name_len = len(request.form['name']) == 0
emails = Teams.query.filter_by(email=email).first()
valid_email = re.match(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", email)
if ('password' in request.form.keys() and not len(request.form['password']) == 0) and \
(not bcrypt_sha256.verify(request.form.get('confirm').strip(), user.password)):
errors.append("Your old password doesn't match what we have.")
if not valid_email:
errors.append("That email doesn't look right")
if not utils.get_config('prevent_name_change') and names and name != session['username']:
errors.append('That team name is already taken')
if emails and emails.id != session['id']:
errors.append('That email has already been used')
if not utils.get_config('prevent_name_change') and name_len:
errors.append('Pick a longer team name')
if website.strip() and not utils.validate_url(website):
errors.append("That doesn't look like a valid URL")
if len(errors) > 0:
return render_template('profile.html', name=name, email=email, website=website,
affiliation=affiliation, country=country, errors=errors)
else:
team = Teams.query.filter_by(id=session['id']).first()
if not utils.get_config('prevent_name_change'):
team.name = name
if team.email != email.lower():
team.email = email.lower()
if utils.get_config('verify_emails'):
team.verified = False
session['username'] = team.name
if 'password' in request.form.keys() and not len(request.form['password']) == 0:
team.password = bcrypt_sha256.encrypt(request.form.get('password'))
team.website = website
team.affiliation = affiliation
team.country = country
db.session.commit()
db.session.close()
return redirect(url_for('views.profile'))
else:
user = Teams.query.filter_by(id=session['id']).first()
name = user.name
email = user.email
website = user.website
affiliation = user.affiliation
country = user.country
prevent_name_change = utils.get_config('prevent_name_change')
confirm_email = utils.get_config('verify_emails') and not user.verified
return render_template('profile.html', name=name, email=email, website=website, affiliation=affiliation,
country=country, prevent_name_change=prevent_name_change, confirm_email=confirm_email)
else:
return redirect(url_for('auth.login'))
@views.route('/files', defaults={'path': ''})
@views.route('/files/<path:path>')
def file_handler(path):
f = Files.query.filter_by(location=path).first_or_404()
if f.chal:
if not utils.is_admin():
if not utils.ctftime():
if utils.view_after_ctf() and utils.ctf_started():
pass
else:
abort(403)
upload_folder = os.path.join(app.root_path, app.config['UPLOAD_FOLDER'])
return send_file(safe_join(upload_folder, f.location))
@views.route('/themes/<theme>/static/<path:path>')
def themes_handler(theme, path):
filename = safe_join(app.root_path, 'themes', theme, 'static', path)
if os.path.isfile(filename):
return send_file(filename)
else:
abort(404)
| true
| true
|
1c47ddab6db9862eac3c601311b8498882a12edb
| 1,209
|
py
|
Python
|
test.py
|
yyht/OpenAttack
|
637e81a9c60874ec35f923d7c62687cbd6ee3633
|
[
"MIT"
] | null | null | null |
test.py
|
yyht/OpenAttack
|
637e81a9c60874ec35f923d7c62687cbd6ee3633
|
[
"MIT"
] | null | null | null |
test.py
|
yyht/OpenAttack
|
637e81a9c60874ec35f923d7c62687cbd6ee3633
|
[
"MIT"
] | 1
|
2020-09-01T11:14:42.000Z
|
2020-09-01T11:14:42.000Z
|
# Smoke test: run every available OpenAttack attacker against a BiLSTM
# victim on a tiny SST slice, printing each evaluation result. A failure
# in any single attacker is caught and reported so the sweep continues.
import OpenAttack

# Keep the run small: first 5 examples of the first (train) split.
dataset = OpenAttack.loadDataset("SST")[0][:5]
clsf = OpenAttack.loadVictim("BiLSTM.SST")

# SEA needs paraphrase rules and UAT needs universal trigger tokens,
# both derived from the victim model before the attackers are built.
rules = OpenAttack.attackers.SEAAttacker.get_rules(clsf, dataset)
triggers = OpenAttack.attackers.UATAttacker.get_triggers(clsf, dataset, word2id=clsf.config["word2id"], embedding=clsf.config["embedding"])

attackers = [
    OpenAttack.attackers.FDAttacker(word2id=clsf.config["word2id"], embedding=clsf.config["embedding"]),
    OpenAttack.attackers.SEAAttacker(rules=rules),
    OpenAttack.attackers.UATAttacker(triggers=triggers),
    OpenAttack.attackers.TextBuggerAttacker(),
    OpenAttack.attackers.TextFoolerAttacker(),
    OpenAttack.attackers.VIPERAttacker(),
    OpenAttack.attackers.DeepWordBugAttacker(),
    OpenAttack.attackers.GANAttacker(),
    OpenAttack.attackers.GeneticAttacker(),
    OpenAttack.attackers.HotFlipAttacker(),
    OpenAttack.attackers.PWWSAttacker(),
    OpenAttack.attackers.SCPNAttacker(),
]

for attacker in attackers:
    print(attacker.__class__.__name__)
    try:
        # Default evaluation on the small dataset; progress bar disabled
        # for clean log output.
        print(
            OpenAttack.attack_evals.DefaultAttackEval(attacker, clsf, progress_bar=False).eval(dataset)
        )
    except Exception as e:
        # Best-effort: report the failure and move on to the next attacker.
        print(e)
    print("\n")
| 36.636364
| 139
| 0.74359
|
import OpenAttack
dataset = OpenAttack.loadDataset("SST")[0][:5]
clsf = OpenAttack.loadVictim("BiLSTM.SST")
rules = OpenAttack.attackers.SEAAttacker.get_rules(clsf, dataset)
triggers = OpenAttack.attackers.UATAttacker.get_triggers(clsf, dataset, word2id=clsf.config["word2id"], embedding=clsf.config["embedding"])
attackers = [
OpenAttack.attackers.FDAttacker(word2id=clsf.config["word2id"], embedding=clsf.config["embedding"]),
OpenAttack.attackers.SEAAttacker(rules=rules),
OpenAttack.attackers.UATAttacker(triggers=triggers),
OpenAttack.attackers.TextBuggerAttacker(),
OpenAttack.attackers.TextFoolerAttacker(),
OpenAttack.attackers.VIPERAttacker(),
OpenAttack.attackers.DeepWordBugAttacker(),
OpenAttack.attackers.GANAttacker(),
OpenAttack.attackers.GeneticAttacker(),
OpenAttack.attackers.HotFlipAttacker(),
OpenAttack.attackers.PWWSAttacker(),
OpenAttack.attackers.SCPNAttacker(),
]
for attacker in attackers:
print(attacker.__class__.__name__)
try:
print(
OpenAttack.attack_evals.DefaultAttackEval(attacker, clsf, progress_bar=False).eval(dataset)
)
except Exception as e:
print(e)
print("\n")
| true
| true
|
1c47ded1a344d1cd51b13949d45a9106e2b325a5
| 1,220
|
py
|
Python
|
algos/rl/ppo_ray_random.py
|
XiaoSanchez/autophase
|
3d8d173ad27b9786e36efd22d0ceacbcf1cb1dfb
|
[
"BSD-3-Clause"
] | 14
|
2020-04-03T12:41:50.000Z
|
2022-02-04T00:05:01.000Z
|
algos/rl/ppo_ray_random.py
|
XiaoSanchez/autophase
|
3d8d173ad27b9786e36efd22d0ceacbcf1cb1dfb
|
[
"BSD-3-Clause"
] | 2
|
2020-03-02T04:32:58.000Z
|
2021-09-15T20:02:25.000Z
|
algos/rl/ppo_ray_random.py
|
XiaoSanchez/autophase
|
3d8d173ad27b9786e36efd22d0ceacbcf1cb1dfb
|
[
"BSD-3-Clause"
] | 8
|
2020-03-02T10:30:36.000Z
|
2021-08-03T02:29:38.000Z
|
# Tune a PPO agent on randomly sampled benchmark programs in the HLS
# (high-level synthesis) pass-ordering environment.
import ray
import ray.tune as tune
from ray.rllib.agents import ppo
from gym_hls.envs.hls_env import HLSEnv
from gym_hls.envs.hls_multi_env import HLSMultiEnv

ray.init()
env_configs = {}
from gym_hls.envs.random_bm import get_random

# Sample a pool of random benchmark programs to tune on.
num_pgms = 1000
bms = get_random(N=num_pgms)

# NOTE(review): indentation was lost in transit; the run_experiments call
# is placed inside the loop here (one tuning run per benchmark), which
# matches the per-program "Tune for ..." print — confirm against the
# original file before relying on this.
for i, bm in enumerate(bms):
    pgm, files = bm
    env_configs['pgm'] = pgm
    env_configs['pgm_files'] = files
    # Each benchmark gets its own working directory, named after the
    # program with the .c suffix stripped.
    env_configs['run_dir'] = 'run_' + pgm.replace(".c", "")
    #env_configs['feature_type'] = 'act_hist'
    env_configs['verbose'] = True
    env_configs['log_results'] = True
    print("Tune for {}".format(pgm))
    tune.run_experiments({
        "my_experiment": {
            "run": "PPO",
            "env": HLSEnv,
            "checkpoint_freq": 50,
            "stop": {"episodes_total": 500},
            "config": {
                "sample_batch_size": 10,
                "train_batch_size": 100,
                "sgd_minibatch_size": 8,
                "num_sgd_iter": 10,
                # Episode length cap; matches the pass-sequence length.
                "horizon": 12,
                "num_gpus": 1,
                "num_workers": 5,
                "lr": 1e-3,
                #"lr": tune.grid_search([0.01, 0.001, 0.0001]),
                # Very large clip effectively disables value-function clipping.
                "vf_clip_param": 1e5,
                "env_config": env_configs,
            },
        },
    })
| 27.727273
| 61
| 0.568852
|
import ray
import ray.tune as tune
from ray.rllib.agents import ppo
from gym_hls.envs.hls_env import HLSEnv
from gym_hls.envs.hls_multi_env import HLSMultiEnv
ray.init()
env_configs = {}
from gym_hls.envs.random_bm import get_random
num_pgms = 1000
bms = get_random(N=num_pgms)
for i, bm in enumerate(bms):
pgm, files= bm
env_configs['pgm'] = pgm
env_configs['pgm_files'] = files
env_configs['run_dir'] = 'run_'+pgm.replace(".c","")
env_configs['verbose'] = True
env_configs['log_results'] = True
print("Tune for {}".format(pgm))
tune.run_experiments({
"my_experiment": {
"run": "PPO",
"env":HLSEnv,
"checkpoint_freq": 50,
"stop": {"episodes_total": 500},
"config": {
"sample_batch_size": 10,
"train_batch_size": 100,
"sgd_minibatch_size": 8,
"num_sgd_iter": 10,
"horizon": 12,
"num_gpus": 1,
"num_workers": 5,
"lr": 1e-3,
"vf_clip_param": 1e5,
"env_config": env_configs,
},
},
})
| true
| true
|
1c47df1a360b94aa764852cc2aa26849ae5db656
| 178
|
py
|
Python
|
src/users/admin.py
|
chiliseed/hub
|
83f29fbdd12e2260397e18e635f508459fa4990e
|
[
"Apache-2.0"
] | null | null | null |
src/users/admin.py
|
chiliseed/hub
|
83f29fbdd12e2260397e18e635f508459fa4990e
|
[
"Apache-2.0"
] | 4
|
2021-04-08T20:10:15.000Z
|
2021-06-10T20:18:17.000Z
|
src/users/admin.py
|
chiliseed/hub
|
83f29fbdd12e2260397e18e635f508459fa4990e
|
[
"Apache-2.0"
] | null | null | null |
"""Django admin ui for users."""
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import User
admin.site.register(User, UserAdmin)
| 22.25
| 47
| 0.786517
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import User
admin.site.register(User, UserAdmin)
| true
| true
|
1c47e205f85d9c9e839f341b0ccf620d6cab46cc
| 421
|
py
|
Python
|
Ejemplos/ejemplo13/src/main2.py
|
ampotty/uip-pc3
|
8362680226df6629791e7a4c6cdf1b738eadc5de
|
[
"MIT"
] | 10
|
2015-10-27T18:29:06.000Z
|
2019-04-03T04:05:31.000Z
|
Ejemplos/ejemplo13/src/main2.py
|
abdelgmartinezl/uip-pc3
|
8362680226df6629791e7a4c6cdf1b738eadc5de
|
[
"MIT"
] | 5
|
2015-10-13T01:12:51.000Z
|
2016-10-08T18:01:17.000Z
|
Ejemplos/ejemplo13/src/main2.py
|
ampotty/uip-pc3
|
8362680226df6629791e7a4c6cdf1b738eadc5de
|
[
"MIT"
] | 25
|
2015-09-19T00:40:17.000Z
|
2018-02-08T02:54:55.000Z
|
# Read grades until a negative sentinel value, then report the average
# as a letter grade (A >= 91, B >= 81, C >= 71, otherwise failing).
suma = 0
contador = 0

while True:
    nota = int(input("Calificacion (negativo para salir): "))
    if nota < 0:
        break  # negative input ends data entry
    contador += 1
    suma += nota

# Guard against division by zero when no grades were entered.
promedio = suma / contador if contador != 0 else 0

if promedio >= 91:
    print("Saca A", promedio)
elif promedio >= 81:
    print("Saca B", promedio)
elif promedio >= 71:
    print("Saca C", promedio)
else:
    print("#TeQuedaste\a", promedio)
| 21.05
| 61
| 0.605701
|
total = 0
cont1 = 0
while True:
nota = int(input("Calificacion (negativo para salir): "))
if nota < 0:
break
cont1 += 1
total = total + nota
if cont1 != 0:
promedio = total / cont1
else:
promedio = 0
if promedio >= 91:
print("Saca A",promedio)
elif promedio >= 81:
print("Saca B",promedio)
elif promedio >= 71:
print("Saca C",promedio)
else:
print("#TeQuedaste\a",promedio)
| true
| true
|
1c47e2bbbb869849f0c26e0f9ceb36f00f7e3eca
| 671
|
py
|
Python
|
manage.py
|
Lioncat2002/confession_site
|
3b1d209b06cb1eac4b43f5c59cb70d2cfb49d25c
|
[
"MIT"
] | null | null | null |
manage.py
|
Lioncat2002/confession_site
|
3b1d209b06cb1eac4b43f5c59cb70d2cfb49d25c
|
[
"MIT"
] | null | null | null |
manage.py
|
Lioncat2002/confession_site
|
3b1d209b06cb1eac4b43f5c59cb70d2cfb49d25c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    # Point Django at this project's settings unless the caller already
    # set DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'confession_site.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a hint about the usual causes (missing install /
        # inactive virtualenv) while chaining the original error.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| 29.173913
| 79
| 0.682563
|
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'confession_site.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true
| true
|
1c47e4640389cb25f44fd436d0754a0c9ecffe9f
| 33,651
|
py
|
Python
|
trax/rl/actor_critic.py
|
YannickWehr/trax
|
67dda3b236339a7f6de803a3f84a9e92d0f0442c
|
[
"Apache-2.0"
] | null | null | null |
trax/rl/actor_critic.py
|
YannickWehr/trax
|
67dda3b236339a7f6de803a3f84a9e92d0f0442c
|
[
"Apache-2.0"
] | null | null | null |
trax/rl/actor_critic.py
|
YannickWehr/trax
|
67dda3b236339a7f6de803a3f84a9e92d0f0442c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2020 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Classes for RL training in Trax."""
import functools
import os
import gym
import numpy as np
import tensorflow as tf
from trax import data
from trax import fastmath
from trax import layers as tl
from trax import shapes
from trax import supervised
from trax.fastmath import numpy as jnp
from trax.rl import advantages as rl_advantages
from trax.rl import training as rl_training
from trax.supervised import lr_schedules as lr
class ActorCriticAgent(rl_training.PolicyAgent):
  """Trains policy and value models using actor-critic methods.

  Attrs:
    on_policy (bool): Whether the algorithm is on-policy. Used in the data
      generators. Should be set in derived classes.
  """
  on_policy = None

  def __init__(self, task,
               value_model=None,
               value_optimizer=None,
               value_lr_schedule=lr.multifactor,
               value_batch_size=64,
               value_train_steps_per_epoch=500,
               value_evals_per_epoch=1,
               value_eval_steps=1,
               n_shared_layers=0,
               added_policy_slice_length=0,
               n_replay_epochs=1,
               scale_value_targets=False,
               q_value=False,
               q_value_aggregate_max=True,
               q_value_n_samples=1,
               **kwargs):  # Arguments of PolicyAgent come here.
    """Configures the actor-critic trainer.

    Args:
      task: `RLTask` instance to use.
      value_model: Model to use for the value function.
      value_optimizer: Optimizer to train the value model.
      value_lr_schedule: lr schedule for value model training.
      value_batch_size: Batch size for value model training.
      value_train_steps_per_epoch: Number of steps are we using to train the
          value model in each epoch.
      value_evals_per_epoch: Number of value trainer evaluations per RL epoch;
          only affects metric reporting.
      value_eval_steps: Number of value trainer steps per evaluation; only
          affects metric reporting.
      n_shared_layers: Number of layers to share between value and policy
          models.
      added_policy_slice_length: How much longer should slices of
          trajectories be for policy than for value training; this
          is useful for TD calculations and only affect the length
          of elements produced for policy batches; value batches
          have maximum length set by `max_slice_length` in `**kwargs`.
      n_replay_epochs: Number of last epochs to take into the replay buffer;
          only makes sense for off-policy algorithms.
      scale_value_targets: If `True`, scale value function targets by
          `1 / (1 - gamma)`.
      q_value: If `True`, use Q-values as baselines.
      q_value_aggregate_max: If `True`, aggregate Q-values with max (or mean).
      q_value_n_samples: Number of samples to average over when calculating
          baselines based on Q-values.
      **kwargs: Arguments for `PolicyAgent` superclass.
    """
    self._n_shared_layers = n_shared_layers
    self._value_batch_size = value_batch_size
    self._value_train_steps_per_epoch = value_train_steps_per_epoch
    self._value_evals_per_epoch = value_evals_per_epoch
    self._value_eval_steps = value_eval_steps
    # The 2 below will be initialized in super.__init__ anyway, but are needed
    # to construct value batches which are needed before PolicyAgent init
    # since policy input creation calls the value model -- hence this code.
    self._task = task
    self._max_slice_length = kwargs.get('max_slice_length', 1)
    self._added_policy_slice_length = added_policy_slice_length
    self._n_replay_epochs = n_replay_epochs
    task.set_n_replay_epochs(n_replay_epochs)
    if scale_value_targets:
      # Scale by the infinite-horizon discounted-return bound 1/(1-gamma).
      self._value_network_scale = 1 / (1 - self._task.gamma)
    else:
      self._value_network_scale = 1
    self._q_value = q_value
    self._q_value_aggregate_max = q_value_aggregate_max
    self._q_value_n_samples = q_value_n_samples
    is_discrete = isinstance(self._task.action_space, gym.spaces.Discrete)
    self._is_discrete = is_discrete
    self._vocab_size = None
    self._sample_all_discrete_actions = False
    if q_value and is_discrete:
      self._vocab_size = self.task.action_space.n
      # TODO(lukaszkaiser): the code below is specific to AWR, move it.
      # If n_samples = n_actions, we'll take them all in actor and reweight.
      if self._q_value_n_samples == self._vocab_size:
        # TODO(lukaszkaiser): set this explicitly once it's in AWR Trainer.
        self._sample_all_discrete_actions = True
    if q_value:
      # Q-networks additionally consume actions as inputs.
      value_model = functools.partial(value_model,
                                      inject_actions=True,
                                      is_discrete=is_discrete,
                                      vocab_size=self._vocab_size)
    self._value_eval_model = value_model(mode='eval')
    self._value_eval_model.init(self._value_model_signature)
    self._value_eval_jit = tl.jit_forward(
        self._value_eval_model.pure_fn, fastmath.device_count(), do_mean=False)
    # Initialize policy training.
    super().__init__(task, **kwargs)
    # Initialize training of the value function.
    value_output_dir = kwargs.get('output_dir', None)
    if value_output_dir is not None:
      value_output_dir = os.path.join(value_output_dir, 'value')
      # If needed, create value_output_dir and missing parent directories.
      if not tf.io.gfile.isdir(value_output_dir):
        tf.io.gfile.makedirs(value_output_dir)
    self._value_inputs = data.inputs.Inputs(
        train_stream=lambda _: self.value_batches_stream())
    self._value_trainer = supervised.Trainer(
        model=value_model,
        optimizer=value_optimizer,
        lr_schedule=value_lr_schedule(),
        loss_fn=tl.L2Loss(),
        inputs=self._value_inputs,
        output_dir=value_output_dir,
        metrics={'value_loss': tl.L2Loss()})

  @property
  def _value_model_signature(self):
    """Input signature (obs [+ actions], target, mask) for the value model."""
    obs_sig = shapes.signature(self._task.observation_space)
    target_sig = mask_sig = shapes.ShapeDtype(
        shape=(1, 1, 1),
    )
    inputs_sig = (obs_sig.replace(shape=(1, 1) + obs_sig.shape),)
    if self._q_value:
      act_sig = shapes.signature(self._task.action_space)
      inputs_sig += (act_sig.replace(shape=(1, 1) + act_sig.shape),)
    return (*inputs_sig, target_sig, mask_sig)

  @property
  def _replay_epochs(self):
    """Indices of the replay epochs to sample from (most recent first)."""
    if self.on_policy:
      assert self._n_replay_epochs == 1, (
          'Non-unit replay buffer size only makes sense for off-policy '
          'algorithms.'
      )
    return [-(ep + 1) for ep in range(self._n_replay_epochs)]

  def _run_value_model(self, observations, dist_inputs):
    """Runs the value network; returns (values, actions, log_probs)."""
    if dist_inputs is None:
      dist_inputs = jnp.zeros(
          observations.shape[:2] + (self._policy_dist.n_inputs,)
      )
    actions = None
    if self._q_value:
      if self._sample_all_discrete_actions:
        # Since we want to sample all actions, start by creating their list.
        act = np.arange(self._vocab_size)
        # Now act is a vector [0, ..., vocab_size-1], but we'll need to tile it.
        # Add extra dimensions so it's the same dimensionality as dist_inputs.
        act = jnp.reshape(act, [-1] + [1] * (len(dist_inputs.shape) - 1))
        # Now act is [vocab_size, 1, ..., 1], dimensionality of dist_inputs.
      dist_inputs = jnp.broadcast_to(
          dist_inputs, (self._q_value_n_samples,) + dist_inputs.shape)
      if self._sample_all_discrete_actions:
        actions = act + jnp.zeros(dist_inputs.shape[:-1], dtype=jnp.int32)
        actions = jnp.swapaxes(actions, 0, 1)
      # Swapping the n_samples and batch_size axes, so the input is split
      # between accelerators along the batch_size axis.
      dist_inputs = jnp.swapaxes(dist_inputs, 0, 1)
      if not self._sample_all_discrete_actions:
        actions = self._policy_dist.sample(dist_inputs)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      obs = observations
      obs = jnp.reshape(obs, [obs.shape[0], 1] + list(obs.shape[1:]))
      inputs = (obs, actions)
    else:
      log_probs = None
      inputs = (observations,)
    n_devices = fastmath.device_count()
    weights = tl.for_n_devices(self._value_eval_model.weights, n_devices)
    state = tl.for_n_devices(self._value_eval_model.state, n_devices)
    rng = self._value_eval_model.rng
    values, _ = self._value_eval_jit(inputs, weights, state, rng)
    values *= self._value_network_scale
    values = jnp.squeeze(values, axis=-1)  # Remove the singleton depth dim.
    return (values, actions, log_probs)

  def _aggregate_values(self, values, aggregate_max, act_log_probs):
    """Collapses the n_samples axis of Q-values into a single value estimate."""
    if self._q_value:
      if aggregate_max:
        values = jnp.max(values, axis=1)
      elif self._sample_all_discrete_actions:
        # Exact expectation over the full action set, weighted by the policy.
        values = jnp.sum(values * jnp.exp(act_log_probs), axis=1)
      else:
        values = jnp.mean(values, axis=1)
    return np.array(values)  # Move the values to CPU.

  def value_batches_stream(self):
    """Use the RLTask self._task to create inputs to the value model."""
    max_slice_length = self._max_slice_length + self._added_policy_slice_length
    for np_trajectory in self._task.trajectory_batch_stream(
        self._value_batch_size,
        max_slice_length=max_slice_length,
        min_slice_length=(1 + self._added_policy_slice_length),
        margin=self._added_policy_slice_length,
        epochs=self._replay_epochs,
    ):
      (values, _, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs
      )
      values = self._aggregate_values(
          values, self._q_value_aggregate_max, act_log_probs)
      # TODO(pkozakowski): Add some shape assertions and docs.
      # Calculate targets based on the advantages over the target network - this
      # allows TD learning for value networks.
      advantages = self._advantage_estimator(
          rewards=np_trajectory.rewards,
          returns=np_trajectory.returns,
          values=values,
          dones=np_trajectory.dones,
          gamma=self._task.gamma,
          n_extra_steps=self._added_policy_slice_length,
      )
      length = advantages.shape[1]
      values = values[:, :length]
      target_returns = values + advantages
      inputs = (np_trajectory.observations[:, :length],)
      if self._q_value:
        inputs += (np_trajectory.actions[:, :length],)
      # Insert an extra depth dimension, so the target shape is consistent with
      # the network output shape.
      yield (
          # Inputs: observations and maybe actions.
          *inputs,
          # Targets: computed returns.
          target_returns[:, :, None] / self._value_network_scale,
          # Mask to zero-out padding.
          np_trajectory.mask[:, :length, None],
      )

  def policy_inputs(self, trajectory, values):
    """Create inputs to policy model from a TrajectoryNp and values.

    Args:
      trajectory: a TrajectoryNp, the trajectory to create inputs from
      values: a numpy array: value function computed on trajectory

    Returns:
      a tuple of numpy arrays of the form (inputs, x1, x2, ...) that will be
      passed to the policy model; policy model will compute outputs from
      inputs and (outputs, x1, x2, ...) will be passed to self.policy_loss
      which should be overridden accordingly.
    """
    # Fixed: this abstract hook previously *returned* the NotImplementedError
    # class instead of raising it, silently handing callers a class object.
    raise NotImplementedError

  def policy_batches_stream(self):
    """Use the RLTask self._task to create inputs to the policy model."""
    # Maximum slice length for policy is max_slice_len + the added policy len.
    max_slice_length = self._max_slice_length + self._added_policy_slice_length
    for np_trajectory in self._task.trajectory_batch_stream(
        self._policy_batch_size,
        epochs=self._replay_epochs,
        max_slice_length=max_slice_length,
        margin=self._added_policy_slice_length,
        include_final_state=False):
      (values, _, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs)
      values = self._aggregate_values(values, False, act_log_probs)
      if len(values.shape) != 2:
        raise ValueError('Values are expected to have shape ' +
                         '[batch_size, length], got: %s' % str(values.shape))
      if values.shape[0] != self._policy_batch_size:
        raise ValueError('Values first dimension should = policy batch size, ' +
                         '%d != %d' %(values.shape[0], self._policy_batch_size))
      yield self.policy_inputs(np_trajectory, values)

  def train_epoch(self):
    """Trains RL for one epoch."""
    # Copy policy state accumulated during data collection to the trainer.
    self._policy_trainer.model_state = self._policy_collect_model.state
    # Copy policy weights and state to value trainer.
    if self._n_shared_layers > 0:
      _copy_model_weights_and_state(
          0, self._n_shared_layers, self._policy_trainer, self._value_trainer
      )
    # Update the target value network (also done again after value training;
    # this pre-update covers the restart-mid-epoch case below).
    self._value_eval_model.weights = self._value_trainer.model_weights
    self._value_eval_model.state = self._value_trainer.model_state
    n_value_evals = rl_training.remaining_evals(
        self._value_trainer.step,
        self._epoch,
        self._value_train_steps_per_epoch,
        self._value_evals_per_epoch)
    for _ in range(n_value_evals):
      self._value_trainer.train_epoch(
          self._value_train_steps_per_epoch // self._value_evals_per_epoch,
          self._value_eval_steps,
      )
    # Copy value weights and state to policy trainer.
    if self._n_shared_layers > 0:
      _copy_model_weights_and_state(
          0, self._n_shared_layers, self._value_trainer, self._policy_trainer
      )
    n_policy_evals = rl_training.remaining_evals(
        self._policy_trainer.step,
        self._epoch,
        self._policy_train_steps_per_epoch,
        self._policy_evals_per_epoch)
    # Check if there was a restart after value training finishes and policy not.
    stopped_after_value = (n_value_evals == 0 and
                           n_policy_evals < self._policy_evals_per_epoch)
    should_copy_weights = self._n_shared_layers > 0 and not stopped_after_value
    if should_copy_weights:
      _copy_model_weights_and_state(
          0, self._n_shared_layers, self._value_trainer, self._policy_trainer
      )
    # Update the target value network.
    self._value_eval_model.weights = self._value_trainer.model_weights
    self._value_eval_model.state = self._value_trainer.model_state
    for _ in range(n_policy_evals):
      self._policy_trainer.train_epoch(
          self._policy_train_steps_per_epoch // self._policy_evals_per_epoch,
          self._policy_eval_steps,
      )

  def close(self):
    """Closes the value trainer, then the superclass's resources."""
    self._value_trainer.close()
    super().close()
def _copy_model_weights_and_state( # pylint: disable=invalid-name
start, end, from_trainer, to_trainer, copy_optimizer_slots=False
):
"""Copy model weights[start:end] from from_trainer to to_trainer."""
from_weights = from_trainer.model_weights
to_weights = list(to_trainer.model_weights)
shared_weights = from_weights[start:end]
to_weights[start:end] = shared_weights
to_trainer.model_weights = to_weights
from_state = from_trainer.model_state
to_state = list(to_trainer.model_state)
shared_state = from_state[start:end]
to_state[start:end] = shared_state
to_trainer.model_state = to_state
if copy_optimizer_slots:
# TODO(lukaszkaiser): make a nicer API in Trainer to support this.
# Currently we use the hack below. Note [0] since that's the model w/o loss.
# pylint: disable=protected-access
from_slots = from_trainer._opt_state.slots[0][start:end]
to_slots = to_trainer._opt_state.slots[0]
# The lines below do to_slots[start:end] = from_slots, but on tuples.
new_slots = to_slots[:start] + from_slots[start:end] + to_slots[end:]
new_slots = tuple([new_slots] + list(to_trainer._opt_state.slots[1:]))
to_trainer._opt_state = to_trainer._opt_state._replace(slots=new_slots)
# pylint: enable=protected-access
### Implementations of common actor-critic algorithms.
class AdvantageBasedActorCriticAgent(ActorCriticAgent):
  """Base class for advantage-based actor-critic algorithms."""
  def __init__(
      self,
      task,
      advantage_estimator=rl_advantages.td_lambda,
      advantage_normalization=True,
      advantage_normalization_epsilon=1e-5,
      **kwargs
  ):
    """Configures the agent.

    Args:
      task: `RLTask` instance to use.
      advantage_estimator: Callable computing advantages from rewards,
        returns, values, dones and gamma (e.g. `rl_advantages.td_lambda`).
      advantage_normalization: Whether to standardize advantages to zero
        mean and unit std before use in the policy loss.
      advantage_normalization_epsilon: Added to the std to avoid division
        by zero when all advantages are equal.
      **kwargs: Arguments for `ActorCriticAgent` superclass.
    """
    self._advantage_estimator = advantage_estimator
    self._advantage_normalization = advantage_normalization
    self._advantage_normalization_epsilon = advantage_normalization_epsilon
    super().__init__(task, **kwargs)
  def policy_inputs(self, trajectory, values):
    """Create inputs to policy model from a TrajectoryNp and values."""
    # How much TD to use is determined by the added policy slice length,
    # as the policy batches need to be this much longer to calculate TD.
    advantages = self._advantage_estimator(
        rewards=trajectory.rewards,
        returns=trajectory.returns,
        values=values,
        dones=trajectory.dones,
        gamma=self._task.gamma,
        n_extra_steps=self._added_policy_slice_length,
    )
    # Observations should be the same length as advantages - so if we are
    # using n_extra_steps, we need to trim the length to match.
    obs = trajectory.observations[:, :advantages.shape[1]]
    act = trajectory.actions[:, :advantages.shape[1]]
    mask = trajectory.mask[:, :advantages.shape[1]]  # Mask to zero-out padding.
    if trajectory.dist_inputs is not None:
      dist_inputs = trajectory.dist_inputs[:, :advantages.shape[1]]
    else:
      # No recorded distribution inputs; feed zeros of the expected shape.
      dist_inputs = jnp.zeros(advantages.shape + (self._policy_dist.n_inputs,))
    # Shape checks to help debugging.
    if len(advantages.shape) != 2:
      raise ValueError('Advantages are expected to have shape ' +
                       '[batch_size, length], got: %s' % str(advantages.shape))
    if act.shape[0:2] != advantages.shape:
      raise ValueError('First 2 dimensions of actions should be the same as in '
                       'advantages, %s != %s' % (act.shape[0:2],
                                                 advantages.shape))
    if obs.shape[0:2] != advantages.shape:
      raise ValueError('First 2 dimensions of observations should be the same '
                       'as in advantages, %s != %s' % (obs.shape[0:2],
                                                       advantages.shape))
    if dist_inputs.shape[:2] != advantages.shape:
      raise ValueError('First 2 dimensions of dist_inputs should be the same '
                       'as in advantages, %s != %s' % (dist_inputs.shape[:2],
                                                       advantages.shape))
    if mask.shape != advantages.shape:
      raise ValueError('Mask and advantages shapes should be the same'
                       ', %s != %s' % (mask.shape, advantages.shape))
    return (obs, act, advantages, dist_inputs, mask)
  @property
  def policy_loss_given_log_probs(self):
    """Policy loss given action log-probabilities."""
    # Subclasses (A2C, PPO, AWR, ...) must provide the concrete loss layer.
    raise NotImplementedError
  def _preprocess_advantages(self, advantages):
    """Optionally standardizes advantages (zero mean, unit std)."""
    if self._advantage_normalization:
      advantages = (
          (advantages - jnp.mean(advantages)) /
          (jnp.std(advantages) + self._advantage_normalization_epsilon)
      )
    return advantages
  @property
  def policy_loss(self, **unused_kwargs):
    """Policy loss."""
    def LossInput(dist_inputs, actions, advantages, old_dist_inputs):  # pylint: disable=invalid-name
      """Calculates action log probabilities and normalizes advantages."""
      advantages = self._preprocess_advantages(advantages)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      old_log_probs = self._policy_dist.log_prob(old_dist_inputs, actions)
      return (log_probs, advantages, old_log_probs)
    return tl.Serial(
        tl.Fn('LossInput', LossInput, n_out=3),
        # Policy loss is expected to consume
        # (log_probs, advantages, old_log_probs, mask).
        self.policy_loss_given_log_probs,
    )
  @property
  def policy_metrics(self):
    """Base metrics plus advantage mean/std diagnostics."""
    metrics = super().policy_metrics
    metrics.update({
        'advantage_mean': self.advantage_mean,
        'advantage_std': self.advantage_std,
    })
    return metrics
  @property
  def advantage_mean(self):
    """Layer reporting the mean of the advantages in a batch."""
    return tl.Serial([
        # (dist_inputs, advantages, old_dist_inputs, mask)
        tl.Select([1]),  # Select just the advantages.
        tl.Fn('AdvantageMean', lambda x: jnp.mean(x)),  # pylint: disable=unnecessary-lambda
    ])
  @property
  def advantage_std(self):
    """Layer reporting the std of the advantages in a batch."""
    return tl.Serial([
        # (dist_inputs, advantages, old_dist_inputs, mask)
        tl.Select([1]),  # Select just the advantages.
        tl.Fn('AdvantageStd', lambda x: jnp.std(x)),  # pylint: disable=unnecessary-lambda
    ])
class A2C(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using the A2C algorithm."""
  on_policy = True
  def __init__(self, task, entropy_coeff=0.01, **kwargs):
    """Configures the A2C Trainer."""
    # entropy_coeff scales the entropy bonus subtracted from the objective,
    # discouraging premature policy collapse.
    self._entropy_coeff = entropy_coeff
    super().__init__(task, **kwargs)
  @property
  def policy_loss_given_log_probs(self):
    """Definition of the Advantage Actor Critic (A2C) loss."""
    # A2C is one of the most basic actor-critic RL algorithms.
    # TODO(henrykm) re-factor f into rl_layers and finally share code between
    # actor_critic.py and actor_critic_joint.py - requires change of inputs
    # in actor_critic_joint.py from dist_inputs to log_probs.
    def f(log_probs, advantages, old_log_probs, mask):
      del old_log_probs  # Not used in A2C.
      # log_probs of the shape float32[128,1]
      # advantages of the shape int32[128,1]
      # mask of the shape int32[128,1]
      if log_probs.shape != advantages.shape:
        raise ValueError('New log-probs and advantages shapes '
                         'should be the same, %s != %s' % (log_probs.shape,
                                                           advantages.shape))
      if log_probs.shape != mask.shape:
        raise ValueError('New log-probs and mask shapes should be the same'
                         ', %s != %s' % (log_probs.shape, mask.shape))
      # Policy-gradient objective, averaged over non-padding positions.
      a2c_objective = -jnp.sum(log_probs * advantages * mask) / jnp.sum(mask)
      # NOTE(review): entropy is computed from log_probs here — confirm that
      # self._policy_dist.entropy expects log-probs rather than dist inputs.
      entropy_vec = self._policy_dist.entropy(log_probs) * self._entropy_coeff
      entropy_loss = jnp.mean(entropy_vec)
      combined_loss = a2c_objective - entropy_loss
      return combined_loss
    return tl.Fn('A2CLoss', f)
class PPO(AdvantageBasedActorCriticAgent):
  """The Proximal Policy Optimization Algorithm aka PPO.

  Trains policy and value models using the PPO algorithm.
  """
  on_policy = True

  def __init__(self, task, epsilon=0.2, entropy_coeff=0.01, **kwargs):
    """Configures the PPO Trainer.

    Args:
      task: `RLTask` instance to use.
      epsilon: Clipping range for the probability ratio in the PPO objective.
      entropy_coeff: Coefficient of the entropy bonus subtracted from the loss.
      **kwargs: Arguments for `AdvantageBasedActorCriticAgent` superclass.
    """
    self._entropy_coeff = entropy_coeff
    self._epsilon = epsilon
    super().__init__(task, **kwargs)

  @property
  def policy_loss_given_log_probs(self):
    """Definition of the Proximal Policy Optimization loss."""
    def f(new_log_probs, advantages, old_log_probs, mask):
      # new_log_probs of the shape float32[128,1]
      # advantages of the shape int32[128,1]
      # old_log_probs of the shape int32[128,1]
      # mask of the shape int32[128,1]
      if new_log_probs.shape != advantages.shape:
        raise ValueError('New log-probs and advantages shapes '
                         'should be the same, %s != %s' % (new_log_probs.shape,
                                                           advantages.shape))
      if new_log_probs.shape != old_log_probs.shape:
        raise ValueError('New log-probs and old log-probs shapes '
                         'should be the same, %s != %s' % (new_log_probs.shape,
                                                           old_log_probs.shape))
      if new_log_probs.shape != mask.shape:
        raise ValueError('New log-probs and mask shapes should be the same'
                         ', %s != %s' % (new_log_probs.shape, mask.shape))
      # The ratio between new_probs and old_probs expressed
      # using log_probs and exponentiation.
      probs_ratio = jnp.exp(new_log_probs - old_log_probs)
      # Fixed: the message previously (and wrongly) talked about log-probs
      # while the check compares advantages against the probability ratio.
      if advantages.shape != probs_ratio.shape:
        raise ValueError('Advantages and probability-ratio shapes '
                         'should be the same, %s != %s' % (advantages.shape,
                                                           probs_ratio.shape))
      unclipped_objective = probs_ratio * advantages
      clipped_objective = jnp.clip(probs_ratio,
                                   1 - self._epsilon,
                                   1 + self._epsilon) * advantages
      # Fixed: compare the two shapes the message actually names (the old
      # check compared unclipped_objective against probs_ratio).
      if unclipped_objective.shape != clipped_objective.shape:
        raise ValueError('unclipped_objective and clipped_objective shapes '
                         'should be the same, %s != %s' % (
                             unclipped_objective.shape,
                             clipped_objective.shape))
      # PPO takes the pessimistic (elementwise minimum) of both objectives.
      ppo_objective = jnp.minimum(unclipped_objective, clipped_objective)
      if ppo_objective.shape != mask.shape:
        raise ValueError('ppo_objective and mask shapes '
                         'should be the same, %s != %s' % (
                             ppo_objective.shape,
                             mask.shape))
      # Average over non-padding positions; subtract the entropy bonus.
      ppo_loss = -jnp.sum(ppo_objective * mask) / jnp.sum(mask)
      # NOTE(review): entropy is computed from new_log_probs — confirm that
      # self._policy_dist.entropy expects log-probs rather than dist inputs.
      entropy_vec = self._policy_dist.entropy(
          new_log_probs) * self._entropy_coeff
      entropy_loss = jnp.mean(entropy_vec)
      combined_loss = ppo_loss - entropy_loss
      return combined_loss
    return tl.Fn('PPOLoss', f)
# AWR is an off-policy actor-critic RL algorithm.
def awr_weights(advantages, beta):
  """Returns the AWR regression weights: exp(advantages / beta)."""
  scaled_advantages = advantages / beta
  return jnp.exp(scaled_advantages)
# Helper functions for computing AWR metrics.
def awr_metrics(beta, preprocess_layer=None):
  """Returns a dict of layers reporting statistics of the AWR weights."""
  stat_fns = (
      ('mean', jnp.mean),
      ('std', jnp.std),
      ('min', jnp.min),
      ('max', jnp.max),
  )
  metrics = {}
  for (stat_name, stat_fn) in stat_fns:
    metrics['awr_weight_' + stat_name] = awr_weight_stat(
        stat_name, stat_fn, beta, preprocess_layer)
  return metrics
def awr_weight_stat(stat_name, stat_fn, beta, preprocess_layer):
  """Returns a layer applying `stat_fn` to the AWR weights of the advantages."""
  if preprocess_layer is None:
    # Default: select just the advantages from the policy-loss inputs.
    head = tl.Select([1])
  else:
    head = preprocess_layer
  def compute_stat(advantages):
    return stat_fn(awr_weights(advantages, beta))
  return tl.Serial([
      head,
      tl.Fn('AWRWeight' + stat_name.capitalize(), compute_stat),
  ])
def AWRLoss(beta, w_max):  # pylint: disable=invalid-name
  """Definition of the Advantage Weighted Regression (AWR) loss."""
  def loss_fn(log_probs, advantages, old_log_probs, mask):
    del old_log_probs  # AWR ignores the behavior policy's log-probs.
    # Exponential advantage weights, capped at w_max for stability.
    clipped_weights = jnp.minimum(awr_weights(advantages, beta), w_max)
    weighted_log_probs = log_probs * clipped_weights * mask
    return -(jnp.sum(weighted_log_probs) / jnp.sum(mask))
  return tl.Fn('AWRLoss', loss_fn)
class AWR(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using AWR."""
  on_policy = False
  def __init__(self, task, beta=1.0, w_max=20.0, **kwargs):
    """Configures the AWR Trainer."""
    # beta: temperature of the exponential advantage weighting;
    # w_max: cap on the resulting weights, guarding against exploding terms.
    self._beta = beta
    self._w_max = w_max
    super().__init__(task, **kwargs)
  @property
  def policy_loss_given_log_probs(self):
    """Policy loss."""
    return AWRLoss(beta=self._beta, w_max=self._w_max)  # pylint: disable=no-value-for-parameter
  @property
  def policy_metrics(self):
    """Base metrics plus AWR-weight statistics (mean/std/min/max)."""
    metrics = super().policy_metrics
    metrics.update(awr_metrics(self._beta))
    return metrics
def SamplingAWRLoss(beta, w_max, reweight=False, sampled_all_discrete=False):  # pylint: disable=invalid-name
  """Definition of the Advantage Weighted Regression (AWR) loss."""
  def loss_fn(log_probs, advantages, old_log_probs, mask):
    effective_mask = mask
    if reweight:  # Use new policy weights for sampled actions instead.
      effective_mask = effective_mask * jnp.exp(
          fastmath.stop_gradient(log_probs) - old_log_probs)
    if sampled_all_discrete:  # Actions were sampled uniformly; weight them.
      effective_mask = effective_mask * jnp.exp(old_log_probs)
    # Exponential advantage weights, capped at w_max for stability.
    clipped_weights = jnp.minimum(awr_weights(advantages, beta), w_max)
    weighted = log_probs * clipped_weights * effective_mask
    return -jnp.sum(weighted) / jnp.sum(effective_mask)
  return tl.Fn('SamplingAWRLoss', loss_fn)
class SamplingAWR(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using Sampling AWR."""
  on_policy = False

  def __init__(self, task, beta=1.0, w_max=20.0, reweight=False, **kwargs):
    """Configures the AWR Trainer.

    Args:
      task: `RLTask` instance to use.
      beta: Temperature of the exponential advantage weighting.
      w_max: Cap on the AWR weights.
      reweight: If `True`, reweight sampled actions by the new policy.
      **kwargs: Arguments for `AdvantageBasedActorCriticAgent`; `q_value`
        is forced to `True` since this agent samples Q-values.
    """
    self._beta = beta
    self._w_max = w_max
    self._reweight = reweight
    super().__init__(task, q_value=True, **kwargs)

  def _policy_inputs_to_advantages(self, preprocess):
    """A layer that computes advantages from policy inputs."""
    def fn(dist_inputs, actions, q_values, act_log_probs, mask):
      del dist_inputs, actions, mask
      # (batch_size, n_samples, ...) -> (n_samples, batch_size, ...)
      q_values = jnp.swapaxes(q_values, 0, 1)
      act_log_probs = jnp.swapaxes(act_log_probs, 0, 1)
      # Baseline over samples: exact expectation when all discrete actions
      # were enumerated, Monte-Carlo mean otherwise.
      if self._sample_all_discrete_actions:
        values = jnp.sum(q_values * jnp.exp(act_log_probs), axis=0)
      else:
        values = jnp.mean(q_values, axis=0)
      advantages = q_values - values  # Broadcasting values over n_samples
      if preprocess:
        advantages = self._preprocess_advantages(advantages)
      return advantages
    return tl.Fn('PolicyInputsToAdvantages', fn)

  @property
  def policy_metrics(self):
    """Policy loss plus advantage and AWR-weight diagnostics."""
    metrics = {
        'policy_loss': self.policy_loss,
        'advantage_mean': tl.Serial(
            self._policy_inputs_to_advantages(False),
            tl.Fn('Mean', lambda x: jnp.mean(x))  # pylint: disable=unnecessary-lambda
        ),
        'advantage_std': tl.Serial(
            self._policy_inputs_to_advantages(False),
            tl.Fn('Std', lambda x: jnp.std(x))  # pylint: disable=unnecessary-lambda
        )
    }
    metrics.update(awr_metrics(
        self._beta, preprocess_layer=self._policy_inputs_to_advantages(True)))
    return metrics

  @property
  def policy_loss(self, **unused_kwargs):
    """Policy loss."""
    def LossInput(dist_inputs, actions, q_values, act_log_probs, mask):  # pylint: disable=invalid-name
      """Calculates action log probabilities and normalizes advantages."""
      # (batch_size, n_samples, ...) -> (n_samples, batch_size, ...)
      q_values = jnp.swapaxes(q_values, 0, 1)
      mask = jnp.swapaxes(mask, 0, 1)
      actions = jnp.swapaxes(actions, 0, 1)
      act_log_probs = jnp.swapaxes(act_log_probs, 0, 1)
      # TODO(pkozakowski,lukaszkaiser): Try max here, or reweighting?
      if self._sample_all_discrete_actions:
        values = jnp.sum(q_values * jnp.exp(act_log_probs), axis=0)
      else:
        values = jnp.mean(q_values, axis=0)
      advantages = q_values - values  # Broadcasting values over n_samples
      advantages = self._preprocess_advantages(advantages)
      # Broadcast inputs and calculate log-probs
      dist_inputs = jnp.broadcast_to(
          dist_inputs, (self._q_value_n_samples,) + dist_inputs.shape)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      return (log_probs, advantages, act_log_probs, mask)
    return tl.Serial(
        tl.Fn('LossInput', LossInput, n_out=4),
        # Policy loss is expected to consume
        # (log_probs, advantages, old_log_probs, mask).
        SamplingAWRLoss(
            beta=self._beta, w_max=self._w_max, reweight=self._reweight,
            sampled_all_discrete=self._sample_all_discrete_actions)
    )

  def policy_batches_stream(self):
    """Use the RLTask self._task to create inputs to the policy model."""
    # For now TD-0 estimation of the value. TODO(pkozakowski): Support others?
    for np_trajectory in self._task.trajectory_batch_stream(
        self._policy_batch_size,
        epochs=self._replay_epochs,
        max_slice_length=self._max_slice_length,
        include_final_state=False,
    ):
      (q_values, actions, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs)
      shapes.assert_same_shape(q_values, act_log_probs)
      # q_values shape: (batch_size, n_samples, length)
      if len(q_values.shape) != 3:
        raise ValueError('Q-values are expected to have shape [batch_size, ' +
                         'n_samples, length], got: %s' % str(q_values.shape))
      if q_values.shape[1] != self._q_value_n_samples:
        raise ValueError('Q-values dimension 1 should = n_samples, %d != %d'
                         % (q_values.shape[1], self._q_value_n_samples))
      if q_values.shape[0] != self._policy_batch_size:
        # Fixed: the message previously printed q_values.shape[1] although
        # the check is on dimension 0.
        raise ValueError('Q-values dimension 0 should = policy batch size, ' +
                         '%d!=%d' % (q_values.shape[0],
                                     self._policy_batch_size))
      mask = np_trajectory.mask
      mask = np.reshape(mask, [mask.shape[0], 1] + list(mask.shape[1:]))
      mask = jnp.broadcast_to(mask, q_values.shape)
      shapes.assert_same_shape(mask, q_values)
      yield (np_trajectory.observations, actions, q_values, act_log_probs, mask)
| 41.238971
| 109
| 0.673799
|
import functools
import os
import gym
import numpy as np
import tensorflow as tf
from trax import data
from trax import fastmath
from trax import layers as tl
from trax import shapes
from trax import supervised
from trax.fastmath import numpy as jnp
from trax.rl import advantages as rl_advantages
from trax.rl import training as rl_training
from trax.supervised import lr_schedules as lr
class ActorCriticAgent(rl_training.PolicyAgent):
on_policy = None
def __init__(self, task,
value_model=None,
value_optimizer=None,
value_lr_schedule=lr.multifactor,
value_batch_size=64,
value_train_steps_per_epoch=500,
value_evals_per_epoch=1,
value_eval_steps=1,
n_shared_layers=0,
added_policy_slice_length=0,
n_replay_epochs=1,
scale_value_targets=False,
q_value=False,
q_value_aggregate_max=True,
q_value_n_samples=1,
**kwargs):
self._n_shared_layers = n_shared_layers
self._value_batch_size = value_batch_size
self._value_train_steps_per_epoch = value_train_steps_per_epoch
self._value_evals_per_epoch = value_evals_per_epoch
self._value_eval_steps = value_eval_steps
self._task = task
self._max_slice_length = kwargs.get('max_slice_length', 1)
self._added_policy_slice_length = added_policy_slice_length
self._n_replay_epochs = n_replay_epochs
task.set_n_replay_epochs(n_replay_epochs)
if scale_value_targets:
self._value_network_scale = 1 / (1 - self._task.gamma)
else:
self._value_network_scale = 1
self._q_value = q_value
self._q_value_aggregate_max = q_value_aggregate_max
self._q_value_n_samples = q_value_n_samples
is_discrete = isinstance(self._task.action_space, gym.spaces.Discrete)
self._is_discrete = is_discrete
self._vocab_size = None
self._sample_all_discrete_actions = False
if q_value and is_discrete:
self._vocab_size = self.task.action_space.n
if self._q_value_n_samples == self._vocab_size:
# TODO(lukaszkaiser): set this explicitly once it's in AWR Trainer.
self._sample_all_discrete_actions = True
if q_value:
value_model = functools.partial(value_model,
inject_actions=True,
is_discrete=is_discrete,
vocab_size=self._vocab_size)
self._value_eval_model = value_model(mode='eval')
self._value_eval_model.init(self._value_model_signature)
self._value_eval_jit = tl.jit_forward(
self._value_eval_model.pure_fn, fastmath.device_count(), do_mean=False)
super().__init__(task, **kwargs)
value_output_dir = kwargs.get('output_dir', None)
if value_output_dir is not None:
value_output_dir = os.path.join(value_output_dir, 'value')
if not tf.io.gfile.isdir(value_output_dir):
tf.io.gfile.makedirs(value_output_dir)
self._value_inputs = data.inputs.Inputs(
train_stream=lambda _: self.value_batches_stream())
self._value_trainer = supervised.Trainer(
model=value_model,
optimizer=value_optimizer,
lr_schedule=value_lr_schedule(),
loss_fn=tl.L2Loss(),
inputs=self._value_inputs,
output_dir=value_output_dir,
metrics={'value_loss': tl.L2Loss()})
@property
def _value_model_signature(self):
obs_sig = shapes.signature(self._task.observation_space)
target_sig = mask_sig = shapes.ShapeDtype(
shape=(1, 1, 1),
)
inputs_sig = (obs_sig.replace(shape=(1, 1) + obs_sig.shape),)
if self._q_value:
act_sig = shapes.signature(self._task.action_space)
inputs_sig += (act_sig.replace(shape=(1, 1) + act_sig.shape),)
return (*inputs_sig, target_sig, mask_sig)
@property
def _replay_epochs(self):
if self.on_policy:
assert self._n_replay_epochs == 1, (
'Non-unit replay buffer size only makes sense for off-policy '
'algorithms.'
)
return [-(ep + 1) for ep in range(self._n_replay_epochs)]
  def _run_value_model(self, observations, dist_inputs):
    """Runs the (jitted) value-evaluation model on a batch of observations.

    Args:
      observations: batch of observations; leading dims are (batch, time).
      dist_inputs: policy-distribution parameters for the observations, or
        None, in which case zeros of the right trailing size are used.

    Returns:
      Tuple (values, actions, log_probs):
        values: value estimates, scaled by `self._value_network_scale` with
          the trailing singleton axis squeezed out.
        actions: the actions evaluated (sampled from the policy, or all
          discrete actions when `self._sample_all_discrete_actions`); None
          when not in Q-value mode.
        log_probs: log-probabilities of those actions under the policy; None
          when not in Q-value mode.
    """
    if dist_inputs is None:
      dist_inputs = jnp.zeros(
          observations.shape[:2] + (self._policy_dist.n_inputs,)
      )

    actions = None
    if self._q_value:
      if self._sample_all_discrete_actions:
        # In the discrete case, evaluate Q on every action, not on samples.
        act = np.arange(self._vocab_size)
        # Add extra dimensions so it's the same dimensionality as dist_inputs.
        act = jnp.reshape(act, [-1] + [1] * (len(dist_inputs.shape) - 1))
      # Replicate dist_inputs over a new leading n_samples axis.
      dist_inputs = jnp.broadcast_to(
          dist_inputs, (self._q_value_n_samples,) + dist_inputs.shape)
      if self._sample_all_discrete_actions:
        actions = act + jnp.zeros(dist_inputs.shape[:-1], dtype=jnp.int32)
        actions = jnp.swapaxes(actions, 0, 1)
      # (n_samples, batch, ...) -> (batch, n_samples, ...)
      dist_inputs = jnp.swapaxes(dist_inputs, 0, 1)
      if not self._sample_all_discrete_actions:
        actions = self._policy_dist.sample(dist_inputs)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      obs = observations
      # Insert a singleton axis so observations broadcast against the
      # per-sample actions when fed to the Q-network.
      obs = jnp.reshape(obs, [obs.shape[0], 1] + list(obs.shape[1:]))
      inputs = (obs, actions)
    else:
      log_probs = None
      inputs = (observations,)

    # Run the jitted model across however many accelerator devices there are.
    n_devices = fastmath.device_count()
    weights = tl.for_n_devices(self._value_eval_model.weights, n_devices)
    state = tl.for_n_devices(self._value_eval_model.state, n_devices)
    rng = self._value_eval_model.rng
    values, _ = self._value_eval_jit(inputs, weights, state, rng)
    values *= self._value_network_scale
    values = jnp.squeeze(values, axis=-1)  # Remove the singleton value axis.
    return (values, actions, log_probs)
def _aggregate_values(self, values, aggregate_max, act_log_probs):
if self._q_value:
if aggregate_max:
values = jnp.max(values, axis=1)
elif self._sample_all_discrete_actions:
values = jnp.sum(values * jnp.exp(act_log_probs), axis=1)
else:
values = jnp.mean(values, axis=1)
return np.array(values)
  def value_batches_stream(self):
    """Streams training batches for the value model.

    Yields:
      Tuples of (observations[, actions], target returns, mask), where target
      returns are value estimates plus advantage estimates, rescaled by
      1 / self._value_network_scale. Actions are included only in Q-value
      mode.
    """
    # Slices need extra steps beyond the policy slice length so the advantage
    # estimator has lookahead to work with.
    max_slice_length = self._max_slice_length + self._added_policy_slice_length
    for np_trajectory in self._task.trajectory_batch_stream(
        self._value_batch_size,
        max_slice_length=max_slice_length,
        min_slice_length=(1 + self._added_policy_slice_length),
        margin=self._added_policy_slice_length,
        epochs=self._replay_epochs,
    ):
      (values, _, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs
      )
      values = self._aggregate_values(
          values, self._q_value_aggregate_max, act_log_probs)
      advantages = self._advantage_estimator(
          rewards=np_trajectory.rewards,
          returns=np_trajectory.returns,
          values=values,
          dones=np_trajectory.dones,
          gamma=self._task.gamma,
          n_extra_steps=self._added_policy_slice_length,
      )
      # Advantage estimation can shorten the time dimension; trim everything
      # else to match.
      length = advantages.shape[1]
      values = values[:, :length]
      target_returns = values + advantages

      inputs = (np_trajectory.observations[:, :length],)
      if self._q_value:
        inputs += (np_trajectory.actions[:, :length],)

      yield (
          # Inputs to the value model: observations (and actions in Q mode).
          *inputs,
          # Targets: computed returns, rescaled to the network's output scale.
          target_returns[:, :, None] / self._value_network_scale,
          # Mask zeroing out the padded part of the slice.
          np_trajectory.mask[:, :length, None],
      )
def policy_inputs(self, trajectory, values):
return NotImplementedError
  def policy_batches_stream(self):
    """Streams policy-training batches built by `self.policy_inputs`."""
    # Slices need extra steps so the advantage estimator used inside
    # policy_inputs has lookahead to work with.
    max_slice_length = self._max_slice_length + self._added_policy_slice_length
    for np_trajectory in self._task.trajectory_batch_stream(
        self._policy_batch_size,
        epochs=self._replay_epochs,
        max_slice_length=max_slice_length,
        margin=self._added_policy_slice_length,
        include_final_state=False):
      (values, _, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs)
      values = self._aggregate_values(values, False, act_log_probs)
      # Shape checks to help debugging.
      if len(values.shape) != 2:
        raise ValueError('Values are expected to have shape ' +
                         '[batch_size, length], got: %s' % str(values.shape))
      if values.shape[0] != self._policy_batch_size:
        raise ValueError('Values first dimension should = policy batch size, ' +
                         '%d != %d' %(values.shape[0], self._policy_batch_size))
      yield self.policy_inputs(np_trajectory, values)
  def train_epoch(self):
    """Trains the value model and then the policy model for one epoch.

    Keeps the first `self._n_shared_layers` layers in sync between the policy
    and value trainers by explicitly copying weights and state back and
    forth, and refreshes the target value-evaluation model before each
    training phase.
    """
    # Copy policy state accumulated during data collection into the trainer.
    self._policy_trainer.model_state = self._policy_collect_model.state
    # Copy policy weights/state of the shared layers into the value trainer.
    if self._n_shared_layers > 0:
      _copy_model_weights_and_state(
          0, self._n_shared_layers, self._policy_trainer, self._value_trainer
      )
    # Sync the value model used for evaluation with the value trainer.
    self._value_eval_model.weights = self._value_trainer.model_weights
    self._value_eval_model.state = self._value_trainer.model_state

    # How many value-training/eval chunks remain in this epoch (supports
    # resuming mid-epoch).
    n_value_evals = rl_training.remaining_evals(
        self._value_trainer.step,
        self._epoch,
        self._value_train_steps_per_epoch,
        self._value_evals_per_epoch)
    for _ in range(n_value_evals):
      self._value_trainer.train_epoch(
          self._value_train_steps_per_epoch // self._value_evals_per_epoch,
          self._value_eval_steps,
      )
      # Copy value weights/state of the shared layers back to the policy
      # trainer after each chunk.
      if self._n_shared_layers > 0:
        _copy_model_weights_and_state(
            0, self._n_shared_layers, self._value_trainer, self._policy_trainer
        )

    n_policy_evals = rl_training.remaining_evals(
        self._policy_trainer.step,
        self._epoch,
        self._policy_train_steps_per_epoch,
        self._policy_evals_per_epoch)
    # NOTE(review): presumably this guards against overwriting policy weights
    # after a mid-epoch restart where value training already finished but
    # policy training did not -- confirm against the trainer's checkpointing.
    stopped_after_value = (n_value_evals == 0 and
                           n_policy_evals < self._policy_evals_per_epoch)
    should_copy_weights = self._n_shared_layers > 0 and not stopped_after_value
    if should_copy_weights:
      _copy_model_weights_and_state(
          0, self._n_shared_layers, self._value_trainer, self._policy_trainer
      )
    # Sync the evaluation value model again before policy training.
    self._value_eval_model.weights = self._value_trainer.model_weights
    self._value_eval_model.state = self._value_trainer.model_state

    for _ in range(n_policy_evals):
      self._policy_trainer.train_epoch(
          self._policy_train_steps_per_epoch // self._policy_evals_per_epoch,
          self._policy_eval_steps,
      )
  def close(self):
    """Closes the value trainer, then the rest of the agent's resources."""
    self._value_trainer.close()
    super().close()
def _copy_model_weights_and_state(
    start, end, from_trainer, to_trainer, copy_optimizer_slots=False
):
  """Copies weights and state of layers [start, end) between trainers.

  Args:
    start: index of the first layer to copy.
    end: index one past the last layer to copy.
    from_trainer: trainer to copy weights and state from.
    to_trainer: trainer to copy weights and state into.
    copy_optimizer_slots: whether to also copy the optimizer slots of the
      shared layers, keeping the optimizers in sync.
  """
  from_weights = from_trainer.model_weights
  to_weights = list(to_trainer.model_weights)
  shared_weights = from_weights[start:end]
  to_weights[start:end] = shared_weights
  to_trainer.model_weights = to_weights

  from_state = from_trainer.model_state
  to_state = list(to_trainer.model_state)
  shared_state = from_state[start:end]
  to_state[start:end] = shared_state
  to_trainer.model_state = to_state

  if copy_optimizer_slots:
    # pylint: disable=protected-access
    from_slots = from_trainer._opt_state.slots[0][start:end]
    to_slots = to_trainer._opt_state.slots[0]
    # The lines below do to_slots[start:end] = from_slots, but on tuples.
    # NOTE(review): `from_slots` is already sliced to [start:end]; slicing it
    # again with [start:end] is only correct when start == 0 (the only usage
    # in this file) -- verify before reusing with start > 0.
    new_slots = to_slots[:start] + from_slots[start:end] + to_slots[end:]
    new_slots = tuple([new_slots] + list(to_trainer._opt_state.slots[1:]))
    to_trainer._opt_state = to_trainer._opt_state._replace(slots=new_slots)
    # pylint: enable=protected-access
### Implementations of common actor-critic algorithms.
class AdvantageBasedActorCriticAgent(ActorCriticAgent):
  """Base class for advantage-based actor-critic agents (A2C, PPO, AWR).

  Computes advantages with a pluggable estimator and feeds
  (observations, actions, advantages, old dist_inputs, mask) to the policy
  model; subclasses provide `policy_loss_given_log_probs`.
  """

  def __init__(
      self,
      task,
      advantage_estimator=rl_advantages.td_lambda,
      advantage_normalization=True,
      advantage_normalization_epsilon=1e-5,
      **kwargs
  ):
    """Initializes the agent.

    Args:
      task: RL task to train on.
      advantage_estimator: function computing advantages from rewards,
        returns, values and dones.
      advantage_normalization: whether to standardize advantages before use.
      advantage_normalization_epsilon: epsilon added to the std denominator
        during normalization.
      **kwargs: forwarded to ActorCriticAgent.
    """
    self._advantage_estimator = advantage_estimator
    self._advantage_normalization = advantage_normalization
    self._advantage_normalization_epsilon = advantage_normalization_epsilon
    super().__init__(task, **kwargs)

  def policy_inputs(self, trajectory, values):
    """Builds (obs, act, advantages, dist_inputs, mask) for policy training."""
    # How much TD to use is determined by the added policy slice length,
    # as the policy batches need to be this much longer to calculate TD.
    advantages = self._advantage_estimator(
        rewards=trajectory.rewards,
        returns=trajectory.returns,
        values=values,
        dones=trajectory.dones,
        gamma=self._task.gamma,
        n_extra_steps=self._added_policy_slice_length,
    )
    # Observations should be the same length as advantages - so if we are
    # using n_extra_steps, we need to trim the length to match.
    obs = trajectory.observations[:, :advantages.shape[1]]
    act = trajectory.actions[:, :advantages.shape[1]]
    mask = trajectory.mask[:, :advantages.shape[1]]  # Mask to zero-out padding.
    if trajectory.dist_inputs is not None:
      dist_inputs = trajectory.dist_inputs[:, :advantages.shape[1]]
    else:
      dist_inputs = jnp.zeros(advantages.shape + (self._policy_dist.n_inputs,))
    # Shape checks to help debugging.
    if len(advantages.shape) != 2:
      raise ValueError('Advantages are expected to have shape ' +
                       '[batch_size, length], got: %s' % str(advantages.shape))
    if act.shape[0:2] != advantages.shape:
      raise ValueError('First 2 dimensions of actions should be the same as in '
                       'advantages, %s != %s' % (act.shape[0:2],
                                                 advantages.shape))
    if obs.shape[0:2] != advantages.shape:
      raise ValueError('First 2 dimensions of observations should be the same '
                       'as in advantages, %s != %s' % (obs.shape[0:2],
                                                       advantages.shape))
    if dist_inputs.shape[:2] != advantages.shape:
      raise ValueError('First 2 dimensions of dist_inputs should be the same '
                       'as in advantages, %s != %s' % (dist_inputs.shape[:2],
                                                       advantages.shape))
    if mask.shape != advantages.shape:
      raise ValueError('Mask and advantages shapes should be the same'
                       ', %s != %s' % (mask.shape, advantages.shape))
    return (obs, act, advantages, dist_inputs, mask)

  @property
  def policy_loss_given_log_probs(self):
    """Policy loss layer consuming (log_probs, advantages, old_log_probs,
    mask); abstract, must be provided by the subclass."""
    raise NotImplementedError

  def _preprocess_advantages(self, advantages):
    """Optionally standardizes advantages (zero mean, unit-ish std)."""
    if self._advantage_normalization:
      advantages = (
          (advantages - jnp.mean(advantages)) /
          (jnp.std(advantages) + self._advantage_normalization_epsilon)
      )
    return advantages

  @property
  def policy_loss(self, **unused_kwargs):
    """Policy loss: converts policy inputs to log-probs, then applies
    `policy_loss_given_log_probs`."""
    def LossInput(dist_inputs, actions, advantages, old_dist_inputs):  # pylint: disable=invalid-name
      """Converts distribution inputs to log-probs and normalized advantages."""
      advantages = self._preprocess_advantages(advantages)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      old_log_probs = self._policy_dist.log_prob(old_dist_inputs, actions)
      return (log_probs, advantages, old_log_probs)
    return tl.Serial(
        tl.Fn('LossInput', LossInput, n_out=3),
        # Policy loss is expected to consume
        # (log_probs, advantages, old_log_probs, mask).
        self.policy_loss_given_log_probs,
    )

  @property
  def policy_metrics(self):
    """Parent metrics extended with advantage statistics."""
    metrics = super().policy_metrics
    metrics.update({
        'advantage_mean': self.advantage_mean,
        'advantage_std': self.advantage_std,
    })
    return metrics

  @property
  def advantage_mean(self):
    """Layer computing the mean of the advantages in a policy batch."""
    return tl.Serial([
        # (dist_inputs, advantages, old_dist_inputs, mask)
        tl.Select([1]),  # Select just the advantages.
        tl.Fn('AdvantageMean', lambda x: jnp.mean(x)),  # pylint: disable=unnecessary-lambda
    ])

  @property
  def advantage_std(self):
    """Layer computing the std of the advantages in a policy batch."""
    return tl.Serial([
        # (dist_inputs, advantages, old_dist_inputs, mask)
        tl.Select([1]),  # Select just the advantages.
        tl.Fn('AdvantageStd', lambda x: jnp.std(x)),  # pylint: disable=unnecessary-lambda
    ])
class A2C(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using the A2C algorithm."""

  # A2C trains only on trajectories collected by the current policy.
  on_policy = True

  def __init__(self, task, entropy_coeff=0.01, **kwargs):
    """Initializes A2C.

    Args:
      task: RL task to train on.
      entropy_coeff: coefficient of the entropy bonus added to the objective.
      **kwargs: forwarded to AdvantageBasedActorCriticAgent.
    """
    self._entropy_coeff = entropy_coeff
    super().__init__(task, **kwargs)

  @property
  def policy_loss_given_log_probs(self):
    """A2C loss: -E[log_probs * advantages] minus an entropy bonus."""
    # A2C is one of the most basic actor-critic RL algorithms.
    # TODO(henrykm) re-factor f into rl_layers and finally share code between
    # actor_critic.py and actor_critic_joint.py - requires change of inputs
    # in actor_critic_joint.py from dist_inputs to log_probs.
    def f(log_probs, advantages, old_log_probs, mask):
      del old_log_probs  # Not used in A2C.
      # log_probs of the shape float32[128,1]
      # advantages of the shape int32[128,1]
      # mask of the shape int32[128,1]
      if log_probs.shape != advantages.shape:
        raise ValueError('New log-probs and advantages shapes '
                         'should be the same, %s != %s' % (log_probs.shape,
                                                           advantages.shape))
      if log_probs.shape != mask.shape:
        raise ValueError('New log-probs and mask shapes should be the same'
                         ', %s != %s' % (log_probs.shape, mask.shape))

      # Masked mean of the policy-gradient objective.
      a2c_objective = -jnp.sum(log_probs * advantages * mask) / jnp.sum(mask)
      # NOTE(review): entropy() is fed log_probs here (PPO below does the
      # same with new_log_probs) -- confirm the distribution's entropy()
      # indeed expects log-probs rather than distribution inputs.
      entropy_vec = self._policy_dist.entropy(log_probs) * self._entropy_coeff
      entropy_loss = jnp.mean(entropy_vec)
      combined_loss = a2c_objective - entropy_loss
      return combined_loss
    return tl.Fn('A2CLoss', f)
class PPO(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using the PPO clipped-surrogate loss."""

  # PPO trains only on trajectories collected by the current policy.
  on_policy = True

  def __init__(self, task, epsilon=0.2, entropy_coeff=0.01, **kwargs):
    """Initializes PPO.

    Args:
      task: RL task to train on.
      epsilon: clipping radius for the probability ratio.
      entropy_coeff: coefficient of the entropy bonus added to the objective.
      **kwargs: forwarded to AdvantageBasedActorCriticAgent.
    """
    self._entropy_coeff = entropy_coeff
    self._epsilon = epsilon
    super().__init__(task, **kwargs)

  @property
  def policy_loss_given_log_probs(self):
    """PPO clipped-surrogate loss minus an entropy bonus."""
    def f(new_log_probs, advantages, old_log_probs, mask):
      # new_log_probs of the shape float32[128,1]
      # advantages of the shape int32[128,1]
      # old_log_probs of the shape int32[128,1]
      # mask of the shape int32[128,1]
      if new_log_probs.shape != advantages.shape:
        raise ValueError('New log-probs and advantages shapes '
                         'should be the same, %s != %s' % (new_log_probs.shape,
                                                           advantages.shape))
      if new_log_probs.shape != old_log_probs.shape:
        raise ValueError('New log-probs and old log-probs shapes '
                         'should be the same, %s != %s' % (new_log_probs.shape,
                                                           old_log_probs.shape))
      if new_log_probs.shape != mask.shape:
        raise ValueError('New log-probs and mask shapes should be the same'
                         ', %s != %s' % (new_log_probs.shape, mask.shape))

      # The ratio between new_probs and old_probs expressed
      # using log_probs and exponentaion
      probs_ratio = jnp.exp(new_log_probs - old_log_probs)
      if advantages.shape != probs_ratio.shape:
        raise ValueError('New log-probs and old log probs shapes '
                         'should be the same, %s != %s' % (advantages.shape,
                                                           probs_ratio.shape))
      unclipped_objective = probs_ratio * advantages
      clipped_objective = jnp.clip(probs_ratio,
                                   1 - self._epsilon,
                                   1 + self._epsilon) * advantages

      if unclipped_objective.shape != probs_ratio.shape:
        raise ValueError('unclipped_objective and clipped_objective shapes '
                         'should be the same, %s != %s' % (
                             unclipped_objective.shape,
                             clipped_objective.shape))

      # Pessimistic (clipped) surrogate objective.
      ppo_objective = jnp.minimum(unclipped_objective, clipped_objective)

      if ppo_objective.shape != mask.shape:
        raise ValueError('ppo_objective and mask shapes '
                         'should be the same, %s != %s' % (
                             ppo_objective.shape,
                             mask.shape))

      ppo_loss = -jnp.sum(ppo_objective * mask) / jnp.sum(mask)
      entropy_vec = self._policy_dist.entropy(
          new_log_probs) * self._entropy_coeff
      entropy_loss = jnp.mean(entropy_vec)
      combined_loss = ppo_loss - entropy_loss
      return combined_loss
    return tl.Fn('PPOLoss', f)
# AWR is an off-policy actor-critic RL algorithm.
def awr_weights(advantages, beta):
  """Exponential advantage weights used by AWR: exp(advantages / beta)."""
  scaled_advantages = advantages / beta
  return jnp.exp(scaled_advantages)
# Helper functions for computing AWR metrics.
def awr_metrics(beta, preprocess_layer=None):
  """Builds metric layers reporting statistics of the AWR weights.

  Args:
    beta: AWR temperature used to compute the weights.
    preprocess_layer: optional layer extracting the advantages from the
      metric inputs; by default the advantages are taken from index 1.

  Returns:
    Dict mapping 'awr_weight_<stat>' names to layers computing the
    mean/std/min/max of the AWR weights.
  """
  stat_fns = (
      ('mean', jnp.mean),
      ('std', jnp.std),
      ('min', jnp.min),
      ('max', jnp.max),
  )
  metrics = {}
  for stat_name, stat_fn in stat_fns:
    metrics['awr_weight_' + stat_name] = awr_weight_stat(
        stat_name, stat_fn, beta, preprocess_layer)
  return metrics
def awr_weight_stat(stat_name, stat_fn, beta, preprocess_layer):
  """Builds a layer computing `stat_fn` of the AWR weights.

  Args:
    stat_name: name of the statistic; used in the layer name.
    stat_fn: function reducing an array of weights to a scalar statistic.
    beta: AWR temperature used to compute the weights.
    preprocess_layer: layer extracting advantages from the metric inputs, or
      None to take them from index 1 of the input stack.

  Returns:
    A layer computing stat_fn(awr_weights(advantages, beta)).
  """
  # Select just the advantages if preprocess layer is not given.
  preprocess = tl.Select([1]) if preprocess_layer is None else preprocess_layer
  return tl.Serial([
      preprocess,
      tl.Fn(
          'AWRWeight' + stat_name.capitalize(),
          lambda x: stat_fn(awr_weights(x, beta)),
      ),
  ])
def AWRLoss(beta, w_max):  # pylint: disable=invalid-name
  """Advantage Weighted Regression loss layer.

  Masked mean of -log_probs weighted by exp(advantages / beta), with the
  weights clipped at w_max.
  """
  def f(log_probs, advantages, old_log_probs, mask):
    del old_log_probs  # Not used in AWR.
    weights = jnp.minimum(awr_weights(advantages, beta), w_max)
    return -jnp.sum(log_probs * weights * mask) / jnp.sum(mask)
  return tl.Fn('AWRLoss', f)
class AWR(AdvantageBasedActorCriticAgent):
  """Trains policy and value models using AWR."""

  # AWR is off-policy: it trains on data from the replay buffer.
  on_policy = False

  def __init__(self, task, beta=1.0, w_max=20.0, **kwargs):
    """Initializes AWR.

    Args:
      task: RL task to train on.
      beta: temperature of the exponential advantage weighting.
      w_max: clipping threshold for the advantage weights.
      **kwargs: forwarded to AdvantageBasedActorCriticAgent.
    """
    self._beta = beta
    self._w_max = w_max
    super().__init__(task, **kwargs)

  @property
  def policy_loss_given_log_probs(self):
    """Policy loss: the AWR loss with this agent's beta and w_max."""
    return AWRLoss(beta=self._beta, w_max=self._w_max)  # pylint: disable=no-value-for-parameter

  @property
  def policy_metrics(self):
    """Parent metrics extended with AWR weight statistics."""
    metrics = super().policy_metrics
    metrics.update(awr_metrics(self._beta))
    return metrics
def SamplingAWRLoss(beta, w_max, reweight=False, sampled_all_discrete=False):  # pylint: disable=invalid-name
  """AWR loss for Q-values computed over sampled actions.

  Args:
    beta: temperature of the exponential advantage weighting.
    w_max: clipping threshold for the advantage weights.
    reweight: whether to importance-weight the sampled actions by the current
      policy (exp(log_probs - old_log_probs), with a stop-gradient).
    sampled_all_discrete: whether all discrete actions were enumerated
      (uniformly); if so, weight them by their probabilities.

  Returns:
    A layer computing the loss from (log_probs, advantages, old_log_probs,
    mask).
  """
  def f(log_probs, advantages, old_log_probs, mask):
    if reweight:  # Use new policy weights for sampled actions instead.
      mask *= jnp.exp(fastmath.stop_gradient(log_probs) - old_log_probs)
    if sampled_all_discrete:  # Actions were sampled uniformly; weight them.
      mask *= jnp.exp(old_log_probs)
    weights = jnp.minimum(awr_weights(advantages, beta), w_max)
    return -jnp.sum(log_probs * weights * mask) / jnp.sum(mask)
  return tl.Fn('SamplingAWRLoss', f)
class SamplingAWR(AdvantageBasedActorCriticAgent):
  """AWR variant estimating values by sampling actions from the policy.

  Uses a Q-network: for each state, `q_value_n_samples` actions are sampled
  (or all discrete actions enumerated) and their Q-values form both the
  value baseline and per-sample advantages.
  """

  # Trains on replay-buffer data, not just the latest policy's.
  on_policy = False

  def __init__(self, task, beta=1.0, w_max=20.0, reweight=False, **kwargs):
    """Initializes SamplingAWR.

    Args:
      task: RL task to train on.
      beta: temperature of the exponential advantage weighting.
      w_max: clipping threshold for the advantage weights.
      reweight: whether to importance-weight the sampled actions by the
        current policy.
      **kwargs: forwarded to AdvantageBasedActorCriticAgent; `q_value` is
        forced to True since this agent requires a Q-network.
    """
    self._beta = beta
    self._w_max = w_max
    self._reweight = reweight
    super().__init__(task, q_value=True, **kwargs)

  def _policy_inputs_to_advantages(self, preprocess):
    """Layer mapping (dist_inputs, actions, q_values, act_log_probs, mask)
    to per-sample advantages q_values - baseline.

    Args:
      preprocess: whether to also apply advantage normalization.

    Returns:
      A trax layer computing the advantages.
    """
    def fn(dist_inputs, actions, q_values, act_log_probs, mask):
      del dist_inputs, actions, mask
      # (batch, n_samples, ...) -> (n_samples, batch, ...)
      q_values = jnp.swapaxes(q_values, 0, 1)
      act_log_probs = jnp.swapaxes(act_log_probs, 0, 1)
      if self._sample_all_discrete_actions:
        # Expectation over all actions, weighted by policy probabilities.
        values = jnp.sum(q_values * jnp.exp(act_log_probs), axis=0)
      else:
        values = jnp.mean(q_values, axis=0)
      advantages = q_values - values  # Broadcasting values over n_samples
      if preprocess:
        advantages = self._preprocess_advantages(advantages)
      return advantages
    return tl.Fn('PolicyInputsToAdvantages', fn)

  @property
  def policy_metrics(self):
    """Metrics for policy training: loss, advantage and AWR-weight stats."""
    metrics = {
        'policy_loss': self.policy_loss,
        'advantage_mean': tl.Serial(
            self._policy_inputs_to_advantages(False),
            tl.Fn('Mean', lambda x: jnp.mean(x))  # pylint: disable=unnecessary-lambda
        ),
        'advantage_std': tl.Serial(
            self._policy_inputs_to_advantages(False),
            tl.Fn('Std', lambda x: jnp.std(x))  # pylint: disable=unnecessary-lambda
        )
    }
    metrics.update(awr_metrics(
        self._beta, preprocess_layer=self._policy_inputs_to_advantages(True)))
    return metrics

  @property
  def policy_loss(self, **unused_kwargs):
    """Policy loss: sampling AWR loss over per-sample advantages."""
    def LossInput(dist_inputs, actions, q_values, act_log_probs, mask):  # pylint: disable=invalid-name
      """Maps policy inputs to (log_probs, advantages, old_log_probs, mask)."""
      # (batch_size, n_samples, ...) -> (n_samples, batch_size, ...)
      q_values = jnp.swapaxes(q_values, 0, 1)
      mask = jnp.swapaxes(mask, 0, 1)
      actions = jnp.swapaxes(actions, 0, 1)
      act_log_probs = jnp.swapaxes(act_log_probs, 0, 1)

      # TODO(pkozakowski,lukaszkaiser): Try max here, or reweighting?
      if self._sample_all_discrete_actions:
        values = jnp.sum(q_values * jnp.exp(act_log_probs), axis=0)
      else:
        values = jnp.mean(q_values, axis=0)
      advantages = q_values - values  # Broadcasting values over n_samples
      advantages = self._preprocess_advantages(advantages)

      # Broadcast inputs and calculate log-probs
      dist_inputs = jnp.broadcast_to(
          dist_inputs, (self._q_value_n_samples,) + dist_inputs.shape)
      log_probs = self._policy_dist.log_prob(dist_inputs, actions)
      return (log_probs, advantages, act_log_probs, mask)

    return tl.Serial(
        tl.Fn('LossInput', LossInput, n_out=4),
        # Policy loss is expected to consume
        # (log_probs, advantages, old_log_probs, mask).
        SamplingAWRLoss(
            beta=self._beta, w_max=self._w_max, reweight=self._reweight,
            sampled_all_discrete=self._sample_all_discrete_actions)
    )

  def policy_batches_stream(self):
    """Streams batches of (observations, actions, Q-values, act log-probs,
    mask) for policy training."""
    # For now TD-0 estimation of the value. TODO(pkozakowski): Support others?
    for np_trajectory in self._task.trajectory_batch_stream(
        self._policy_batch_size,
        epochs=self._replay_epochs,
        max_slice_length=self._max_slice_length,
        include_final_state=False,
    ):
      (q_values, actions, act_log_probs) = self._run_value_model(
          np_trajectory.observations, np_trajectory.dist_inputs)
      shapes.assert_same_shape(q_values, act_log_probs)

      # q_values shape: (batch_size, n_samples, length)
      if len(q_values.shape) != 3:
        raise ValueError('Q-values are expected to have shape [batch_size, ' +
                         'n_samples, length], got: %s' % str(q_values.shape))
      if q_values.shape[1] != self._q_value_n_samples:
        raise ValueError('Q-values dimension 1 should = n_samples, %d != %d'
                         % (q_values.shape[1], self._q_value_n_samples))
      if q_values.shape[0] != self._policy_batch_size:
        # Bug fix: this message previously reported q_values.shape[1]
        # (n_samples) instead of the offending dimension 0.
        raise ValueError('Q-values dimension 0 should = policy batch size, ' +
                         '%d != %d' % (q_values.shape[0],
                                       self._policy_batch_size))

      # Broadcast the (batch, length) mask over the n_samples axis.
      mask = np_trajectory.mask
      mask = np.reshape(mask, [mask.shape[0], 1] + list(mask.shape[1:]))
      mask = jnp.broadcast_to(mask, q_values.shape)
      shapes.assert_same_shape(mask, q_values)
      yield (np_trajectory.observations, actions, q_values, act_log_probs, mask)
| true
| true
|
1c47e51dc09808eb307a0a839a939f5466b84977
| 576
|
py
|
Python
|
centralized/cbs/EdgeConstraint.py
|
mengwei1/multi_agent_path_planning
|
079a4af80c074e571a802af4506d416db5c6946a
|
[
"MIT"
] | null | null | null |
centralized/cbs/EdgeConstraint.py
|
mengwei1/multi_agent_path_planning
|
079a4af80c074e571a802af4506d416db5c6946a
|
[
"MIT"
] | null | null | null |
centralized/cbs/EdgeConstraint.py
|
mengwei1/multi_agent_path_planning
|
079a4af80c074e571a802af4506d416db5c6946a
|
[
"MIT"
] | null | null | null |
class EdgeConstraint(object):
    """A constraint forbidding an agent from traversing a directed edge.

    The edge goes from ``location_1`` to ``location_2`` and is entered at
    time step ``time``.
    """

    def __init__(self, time, location_1, location_2):
        self.time = time
        self.location_1 = location_1
        self.location_2 = location_2

    def __eq__(self, other):
        return self.time == other.time and self.location_1 == other.location_1 \
            and self.location_2 == other.location_2

    def __hash__(self):
        # Bug fix: hashing the *concatenation* of the string forms made
        # distinct constraints such as (1, '23', x) and (12, '3', x) collide
        # deterministically. Hashing the tuple of string forms keeps equal
        # constraints hashing equal while removing that ambiguity.
        return hash((str(self.time), str(self.location_1), str(self.location_2)))

    def __str__(self):
        return '(' + str(self.time) + ', '+ str(self.location_1) +', '+ str(self.location_2) + ')'
| 36
| 98
| 0.631944
|
class EdgeConstraint(object):
    """Directed edge constraint: entering (location_1 -> location_2) at time."""

    def __init__(self, time, location_1, location_2):
        self.time = time
        self.location_1 = location_1
        self.location_2 = location_2

    def __eq__(self, other):
        # Equal iff time and both endpoints match (short-circuits on time).
        return (self.time == other.time
                and self.location_1 == other.location_1
                and self.location_2 == other.location_2)

    def __hash__(self):
        key = str(self.time) + str(self.location_1) + str(self.location_2)
        return hash(key)

    def __str__(self):
        parts = (str(self.time), str(self.location_1), str(self.location_2))
        return '(' + ', '.join(parts) + ')'
| true
| true
|
1c47e7970da9d61be8311b50ed64ed5185e5dfd2
| 4,712
|
py
|
Python
|
tests/test_albumentations_pytorch.py
|
brandongk-ubco/autoalbument
|
1735ea4376694c2179ac62ce7d100a10b26f2558
|
[
"MIT"
] | 135
|
2020-11-03T15:48:30.000Z
|
2022-03-16T10:52:57.000Z
|
tests/test_albumentations_pytorch.py
|
brandongk-ubco/autoalbument
|
1735ea4376694c2179ac62ce7d100a10b26f2558
|
[
"MIT"
] | 31
|
2020-11-04T10:20:56.000Z
|
2022-03-24T13:46:07.000Z
|
tests/test_albumentations_pytorch.py
|
brandongk-ubco/autoalbument
|
1735ea4376694c2179ac62ce7d100a10b26f2558
|
[
"MIT"
] | 16
|
2020-11-16T08:33:48.000Z
|
2022-03-17T18:34:24.000Z
|
import albumentations.augmentations.functional as F
import pytest
import torch
from torch.autograd import gradcheck
import autoalbument.albumentations_pytorch.functional as PF
from tests.utils import assert_batches_match
class Base:
    """Shared harness comparing albumentations NumPy ops to PyTorch ports.

    Subclasses override `albumentations_fn` (reference per-image NumPy
    implementation) and `albumentations_pytorch_fn` (batched PyTorch
    implementation under test).
    """

    def scalar_to_tensor(self, arg, requires_grad=False, dtype=torch.float32):
        # `None` means the transform takes no argument.
        if arg is None:
            return None
        return torch.tensor(arg, requires_grad=requires_grad, dtype=dtype)

    def test_albumentations_match(self, image_batches, arg):
        # The PyTorch batch result must match the per-image reference results.
        np_images, pytorch_batch = image_batches
        tensor_arg = self.scalar_to_tensor(arg)
        augmented_np_images = [self.albumentations_fn(image, arg) for image in np_images]
        augmented_pytorch_batch = self.albumentations_pytorch_fn(pytorch_batch, tensor_arg)
        assert_batches_match(augmented_np_images, augmented_pytorch_batch)

    def test_gradients(self, gradcheck_batch, arg):
        # gradcheck requires float64 inputs with requires_grad set.
        tensor_arg = self.scalar_to_tensor(arg, requires_grad=True, dtype=torch.float64)
        gradcheck(self.albumentations_pytorch_fn, (gradcheck_batch, tensor_arg))

    def albumentations_fn(self, image, arg):
        # Reference implementation; must be provided by the subclass.
        raise NotImplementedError

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        # PyTorch implementation under test; must be provided by the subclass.
        raise NotImplementedError
@pytest.mark.parametrize("arg", [0.2, 0.4, 0.8])
class TestSolarize(Base):
    """Tests the solarize port for several thresholds."""

    def albumentations_fn(self, image, arg):
        return F.solarize(image, threshold=arg)

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.solarize(pytorch_batch, threshold=arg)

    def test_gradients(self, gradcheck_batch, arg):
        # Gradient check intentionally skipped for solarize.
        pass
@pytest.mark.parametrize("arg", [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0], [-1.0, -1.0, -1.0], [0.0, 0.7, -0.2]])
class TestShiftRgb(Base):
    """Tests the RGB-shift port for several (r, g, b) shift triples."""

    def albumentations_fn(self, image, arg):
        return F.shift_rgb(image, r_shift=arg[0], g_shift=arg[1], b_shift=arg[2])

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.shift_rgb(pytorch_batch, r_shift=arg[0], g_shift=arg[1], b_shift=arg[2])
@pytest.mark.parametrize("arg", [-1.0, 0.1, 0.5, 1.0])
class TestBrightnessAdjust(Base):
    """Tests the brightness-adjust port for several beta values."""

    def albumentations_fn(self, image, arg):
        # Reference uses the combined brightness/contrast op with beta only.
        return F.brightness_contrast_adjust(image, beta=arg, beta_by_max=True)

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.brightness_adjust(pytorch_batch, beta=arg)
@pytest.mark.parametrize("arg", [-1.0, 0.1, 0.5, 1.0])
class TestContrastAdjust(Base):
    """Tests the contrast-adjust port for several alpha values."""

    def albumentations_fn(self, image, arg):
        # Reference uses the combined brightness/contrast op with alpha only.
        return F.brightness_contrast_adjust(image, alpha=arg)

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.contrast_adjust(pytorch_batch, alpha=arg)
@pytest.mark.parametrize("arg", [None])
class TestVflip(Base):
    """Tests the vertical-flip port (takes no argument)."""

    def albumentations_fn(self, image, arg):
        return F.vflip(image)

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.vflip(pytorch_batch)

    def test_gradients(self, gradcheck_batch, arg):
        # Gradient check intentionally skipped for flips.
        pass
@pytest.mark.parametrize("arg", [None])
class TestHflip(Base):
    """Tests the horizontal-flip port (takes no argument)."""

    def albumentations_fn(self, image, arg):
        return F.hflip(image)

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.hflip(pytorch_batch)

    def test_gradients(self, gradcheck_batch, arg):
        # Gradient check intentionally skipped for flips.
        pass
@pytest.mark.parametrize(
    "arg",
    [
        [0.01],
        [-0.5],
        [0.5],
        [1.0 - 1e-6],
        [-1.0 + 1e-6],
    ],
)
class TestShiftX(Base):
    """Gradient-checks the horizontal-shift port for several dx values."""

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.shift_x(pytorch_batch, dx=arg)

    def test_albumentations_match(self, image_batches, arg):
        # No reference albumentations implementation; match test skipped.
        pass
@pytest.mark.parametrize(
    "arg",
    [
        [0.01],
        [-0.5],
        [0.5],
        [1.0 - 1e-6],
        [-1.0 + 1e-6],
    ],
)
class TestShiftY(Base):
    """Gradient-checks the vertical-shift port for several dy values."""

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.shift_y(pytorch_batch, dy=arg)

    def test_albumentations_match(self, image_batches, arg):
        # No reference albumentations implementation; match test skipped.
        pass
@pytest.mark.parametrize(
    "arg",
    [
        [0.1],
        [-0.5],
        [0.5],
        [1.0 - 1e-6],
        [-1.0 + 1e-6],
    ],
)
class TestScale(Base):
    """Gradient-checks the scale port for several scale factors."""

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.scale(pytorch_batch, scale=arg)

    def test_albumentations_match(self, image_batches, arg):
        # No reference albumentations implementation; match test skipped.
        pass
@pytest.mark.parametrize(
    "arg",
    [
        [0.1],
        [-0.5],
        [0.5],
        [1.0 - 1e-6],
        [-1.0 + 1e-6],
    ],
)
class TestRotate(Base):
    """Gradient-checks the rotate port for several angles."""

    def albumentations_pytorch_fn(self, pytorch_batch, arg):
        return PF.rotate(pytorch_batch, angle=arg)

    def test_albumentations_match(self, image_batches, arg):
        # No reference albumentations implementation; match test skipped.
        pass
| 28.215569
| 105
| 0.66702
|
import albumentations.augmentations.functional as F
import pytest
import torch
from torch.autograd import gradcheck
import autoalbument.albumentations_pytorch.functional as PF
from tests.utils import assert_batches_match
class Base:
def scalar_to_tensor(self, arg, requires_grad=False, dtype=torch.float32):
if arg is None:
return None
return torch.tensor(arg, requires_grad=requires_grad, dtype=dtype)
def test_albumentations_match(self, image_batches, arg):
np_images, pytorch_batch = image_batches
tensor_arg = self.scalar_to_tensor(arg)
augmented_np_images = [self.albumentations_fn(image, arg) for image in np_images]
augmented_pytorch_batch = self.albumentations_pytorch_fn(pytorch_batch, tensor_arg)
assert_batches_match(augmented_np_images, augmented_pytorch_batch)
def test_gradients(self, gradcheck_batch, arg):
tensor_arg = self.scalar_to_tensor(arg, requires_grad=True, dtype=torch.float64)
gradcheck(self.albumentations_pytorch_fn, (gradcheck_batch, tensor_arg))
def albumentations_fn(self, image, arg):
raise NotImplementedError
def albumentations_pytorch_fn(self, pytorch_batch, arg):
raise NotImplementedError
@pytest.mark.parametrize("arg", [0.2, 0.4, 0.8])
class TestSolarize(Base):
def albumentations_fn(self, image, arg):
return F.solarize(image, threshold=arg)
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.solarize(pytorch_batch, threshold=arg)
def test_gradients(self, gradcheck_batch, arg):
pass
@pytest.mark.parametrize("arg", [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0], [-1.0, -1.0, -1.0], [0.0, 0.7, -0.2]])
class TestShiftRgb(Base):
def albumentations_fn(self, image, arg):
return F.shift_rgb(image, r_shift=arg[0], g_shift=arg[1], b_shift=arg[2])
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.shift_rgb(pytorch_batch, r_shift=arg[0], g_shift=arg[1], b_shift=arg[2])
@pytest.mark.parametrize("arg", [-1.0, 0.1, 0.5, 1.0])
class TestBrightnessAdjust(Base):
def albumentations_fn(self, image, arg):
return F.brightness_contrast_adjust(image, beta=arg, beta_by_max=True)
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.brightness_adjust(pytorch_batch, beta=arg)
@pytest.mark.parametrize("arg", [-1.0, 0.1, 0.5, 1.0])
class TestContrastAdjust(Base):
def albumentations_fn(self, image, arg):
return F.brightness_contrast_adjust(image, alpha=arg)
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.contrast_adjust(pytorch_batch, alpha=arg)
@pytest.mark.parametrize("arg", [None])
class TestVflip(Base):
def albumentations_fn(self, image, arg):
return F.vflip(image)
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.vflip(pytorch_batch)
def test_gradients(self, gradcheck_batch, arg):
pass
@pytest.mark.parametrize("arg", [None])
class TestHflip(Base):
def albumentations_fn(self, image, arg):
return F.hflip(image)
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.hflip(pytorch_batch)
def test_gradients(self, gradcheck_batch, arg):
pass
@pytest.mark.parametrize(
"arg",
[
[0.01],
[-0.5],
[0.5],
[1.0 - 1e-6],
[-1.0 + 1e-6],
],
)
class TestShiftX(Base):
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.shift_x(pytorch_batch, dx=arg)
def test_albumentations_match(self, image_batches, arg):
pass
@pytest.mark.parametrize(
"arg",
[
[0.01],
[-0.5],
[0.5],
[1.0 - 1e-6],
[-1.0 + 1e-6],
],
)
class TestShiftY(Base):
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.shift_y(pytorch_batch, dy=arg)
def test_albumentations_match(self, image_batches, arg):
pass
@pytest.mark.parametrize(
"arg",
[
[0.1],
[-0.5],
[0.5],
[1.0 - 1e-6],
[-1.0 + 1e-6],
],
)
class TestScale(Base):
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.scale(pytorch_batch, scale=arg)
def test_albumentations_match(self, image_batches, arg):
pass
@pytest.mark.parametrize(
"arg",
[
[0.1],
[-0.5],
[0.5],
[1.0 - 1e-6],
[-1.0 + 1e-6],
],
)
class TestRotate(Base):
def albumentations_pytorch_fn(self, pytorch_batch, arg):
return PF.rotate(pytorch_batch, angle=arg)
def test_albumentations_match(self, image_batches, arg):
pass
| true
| true
|
1c47e7ea31e47aede41afd960aa65ae5de620bf1
| 1,379
|
py
|
Python
|
notifications/migrations/0001_initial.py
|
ABERT-NOLA/App-Instagram
|
f1394a96baa8e19a5b4b8b1c96917b9da5f3fe43
|
[
"MIT"
] | 1
|
2020-11-17T09:00:59.000Z
|
2020-11-17T09:00:59.000Z
|
notifications/migrations/0001_initial.py
|
kahenya-anita/Insta-Clone
|
4894e959c17170505e73aee6dc497aeb29d55a71
|
[
"MIT"
] | null | null | null |
notifications/migrations/0001_initial.py
|
kahenya-anita/Insta-Clone
|
4894e959c17170505e73aee6dc497aeb29d55a71
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-15 12:25
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the Notification model."""

    initial = True

    dependencies = [
        ('post', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # notification_type codes: 1=Like, 2=Comment, 3=Follow.
                ('notification_type', models.IntegerField(choices=[(1, 'Like'), (2, 'Comment'), (3, 'Follow')])),
                ('text_preview', models.CharField(blank=True, max_length=90)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('is_seen', models.BooleanField(default=False)),
                # Optional post the notification refers to (null for follows).
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='noti_post', to='post.post')),
                ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='noti_from_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='noti_to_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 43.09375
| 154
| 0.643945
|
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema: the ``Notification`` table and its FKs."""

    initial = True

    dependencies = [
        ('post', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Event type: 1 = Like, 2 = Comment, 3 = Follow.
                ('notification_type', models.IntegerField(choices=[(1, 'Like'), (2, 'Comment'), (3, 'Follow')])),
                ('text_preview', models.CharField(blank=True, max_length=90)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('is_seen', models.BooleanField(default=False)),
                # Optional link to the post the event happened on (blank/null allowed).
                ('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='noti_post', to='post.post')),
                ('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='noti_from_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='noti_to_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| true
| true
|
1c47e85f18698ee4a933fa7ead51a9efaadf9d71
| 6,007
|
py
|
Python
|
views.py
|
penzance/student_locations
|
44618a237a9061dbc9d705810ad88d255781f44d
|
[
"MIT"
] | 1
|
2015-06-12T13:48:42.000Z
|
2015-06-12T13:48:42.000Z
|
views.py
|
penzance/student_locations
|
44618a237a9061dbc9d705810ad88d255781f44d
|
[
"MIT"
] | null | null | null |
views.py
|
penzance/student_locations
|
44618a237a9061dbc9d705810ad88d255781f44d
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect, render_to_response
from django.views.decorators.http import require_http_methods
from ims_lti_py.tool_config import ToolConfig
from django.conf import settings
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from student_locations.forms import StudentLocationForm
from student_locations.models import Locations
from student_locations.utils import validaterequiredltiparams, getparamfromsession
import logging
logger = logging.getLogger(__name__)
@require_http_methods(['GET'])
def index(request):
    """
    Show the index page.
    """
    return render(request, 'student_locations/index.html')


@login_required()
@require_http_methods(['POST'])
def lti_launch(request):
    """
    Entry point for the LTI launch POST.

    Validates the required LTI parameters and stores them in the session
    (they are only available on this initial POST but are needed throughout
    the application), then redirects to the main view.
    """
    if request.user.is_authenticated():
        if validaterequiredltiparams(request):
            return redirect('sl:main')
        else:
            return render(request, 'student_locations/error.html', {'message': 'Error: The LTI parameter lis_course_offering_sourcedid is required by this LTI tool.'})
    else:
        return render(request, 'student_locations/error.html', {'message': 'Error: user is not authenticated!'})
@login_required()
@require_http_methods(['GET'])
def main(request):
    """
    Display the default view, which is the map view.
    """
    key = settings.STUDENT_LOCATIONS_TOOL.get('google_map_api_v3_key')
    return render(request, 'student_locations/map_view.html', {'request': request, 'api_key': key})
@login_required()
@require_http_methods(['GET'])
def user_edit_view(request):
    """
    Displays the form where a user enters the contact and location data
    shown on the google map.
    """
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    user_id = getparamfromsession(request, 'user_id')
    # Guard: the LTI params may be missing if the session expired.
    if not resource_link_id or not user_id:
        return render(request, 'student_locations/error.html', {'message': 'Unable to retrieve params from session. You might want to try reloading the tool.'})
    try:
        student = Locations.objects.get(resource_link_id=resource_link_id, user_id=user_id)
    except Locations.DoesNotExist:
        student = None
    if student:
        # Pre-fill the form with the user's existing record.
        form = StudentLocationForm(instance=student)
    else:
        form = StudentLocationForm()
    return render(request, 'student_locations/user_edit_view.html', {'request': request, 'form': form})
@login_required()
def addoredituser(request):
    """
    Form action for the user_edit_view form.

    Creates or updates the ``Locations`` record for the current user in the
    current resource, then renders the map view on success or redisplays the
    form with validation errors.
    """
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    user_id = getparamfromsession(request, 'user_id')
    # Fix: guard against missing session params, mirroring user_edit_view.
    # Previously None/empty values would silently create a bogus record.
    if not resource_link_id or not user_id:
        return render(request, 'student_locations/error.html', {'message': 'Unable to retrieve params from session. You might want to try reloading the tool.'})
    try:
        student = Locations.objects.get(resource_link_id=resource_link_id, user_id=user_id)
    except Locations.DoesNotExist:
        student = None
    if student:
        # Bind POST data to the existing record (update).
        form = StudentLocationForm(instance=student, user_id=user_id, resource_link_id=resource_link_id, data=request.POST)
    else:
        logger.debug('student is None')
        # No record yet for this user/resource (create).
        form = StudentLocationForm(user_id=user_id, resource_link_id=resource_link_id, data=request.POST)
    if form.is_valid():
        theform = form.save(commit=False)
        theform.user_id = user_id
        theform.resource_link_id = resource_link_id
        theform.save()
        key = settings.STUDENT_LOCATIONS_TOOL.get('google_map_api_v3_key')
        return render(request, 'student_locations/map_view.html', {'request': request, 'api_key': key})
    else:
        return render(request, 'student_locations/user_edit_view.html', {'request': request, 'form': form})
@login_required()
@require_http_methods(['GET'])
def table_view(request):
    """
    Renders the table view of the students for the current resource.
    """
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    students = Locations.objects.filter(resource_link_id=resource_link_id)
    return render(request, 'student_locations/table_view.html', {'request': request, 'data' : students})


@login_required()
@require_http_methods(['GET'])
def markers_class_xml(request):
    """
    Renders the XML containing the location data for the google map markers.
    """
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    students = Locations.objects.filter(resource_link_id=resource_link_id)
    # NOTE(review): render_to_response/context_instance were removed in
    # Django 1.10 -- this file targets an older Django release.
    return render_to_response('student_locations/markers.xml',
                              {'data' : students},
                              context_instance=RequestContext(request))
@require_http_methods(['GET'])
def tool_config(request):
    """
    Produces the Canvas-specific XML config that can be used to add this
    tool to the Canvas LMS.
    """
    # Build the absolute launch URL matching the request's scheme.
    if request.is_secure():
        host = 'https://' + request.get_host()
    else:
        host = 'http://' + request.get_host()
    url = host + reverse('sl:lti_launch')
    lti_tool_config = ToolConfig(
        title='Student Locations',
        launch_url=url,
        secure_launch_url=url,
    )
    account_nav_params = {
        'enabled': 'true',
        # optionally, supply a different URL for the link:
        # 'url': 'http://library.harvard.edu',
        'text': 'Student Locations',
    }
    lti_tool_config.set_ext_param('canvas.instructure.com', 'privacy_level', 'public')
    lti_tool_config.set_ext_param('canvas.instructure.com', 'course_navigation', account_nav_params)
    lti_tool_config.description = 'This LTI tool facilitates the display of Student Locations.'
    resp = HttpResponse(lti_tool_config.to_xml(), content_type='text/xml', status=200)
    return resp
| 37.080247
| 167
| 0.715832
|
from django.shortcuts import render, redirect, render_to_response
from django.views.decorators.http import require_http_methods
from ims_lti_py.tool_config import ToolConfig
from django.conf import settings
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from student_locations.forms import StudentLocationForm
from student_locations.models import Locations
from student_locations.utils import validaterequiredltiparams, getparamfromsession
import logging
logger = logging.getLogger(__name__)
@require_http_methods(['GET'])
def index(request):
    """Render the landing (index) page."""
    return render(request, 'student_locations/index.html')


@login_required()
@require_http_methods(['POST'])
def lti_launch(request):
    """Validate the LTI launch POST and enter the tool, or show an error page."""
    if not request.user.is_authenticated():
        return render(request, 'student_locations/error.html', {'message': 'Error: user is not authenticated!'})
    if validaterequiredltiparams(request):
        return redirect('sl:main')
    return render(request, 'student_locations/error.html', {'message': 'Error: The LTI parameter lis_course_offering_sourcedid is required by this LTI tool.'})


@login_required()
@require_http_methods(['GET'])
def main(request):
    """Render the default map view, passing the configured Google Maps key."""
    context = {
        'request': request,
        'api_key': settings.STUDENT_LOCATIONS_TOOL.get('google_map_api_v3_key'),
    }
    return render(request, 'student_locations/map_view.html', context)
@login_required()
@require_http_methods(['GET'])
def user_edit_view(request):
    """Render the form where a user enters contact/location data for the map."""
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    user_id = getparamfromsession(request, 'user_id')
    if not (resource_link_id and user_id):
        return render(request, 'student_locations/error.html', {'message': 'Unable to retrieve params from session. You might want to try reloading the tool.'})
    try:
        existing = Locations.objects.get(resource_link_id=resource_link_id, user_id=user_id)
    except Locations.DoesNotExist:
        existing = None
    # Pre-fill with the existing record when there is one.
    form = StudentLocationForm(instance=existing) if existing else StudentLocationForm()
    return render(request, 'student_locations/user_edit_view.html', {'request': request, 'form': form})
@login_required()
def addoredituser(request):
    """
    Form action for ``user_edit_view``: create or update the caller's
    ``Locations`` record, then show the map on success or the form on errors.
    """
    # NOTE(review): unlike user_edit_view there is no guard for missing
    # session params, and no @require_http_methods(['POST']) -- confirm.
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    user_id = getparamfromsession(request, 'user_id')
    try:
        student = Locations.objects.get(resource_link_id=resource_link_id, user_id=user_id)
    except Locations.DoesNotExist:
        student = None
    if student:
        # Bind POST data to the existing record (update).
        form = StudentLocationForm(instance=student, user_id=user_id, resource_link_id=resource_link_id, data=request.POST)
    else:
        logger.debug('student is None')
        # No record yet for this user/resource (create).
        form = StudentLocationForm(user_id=user_id, resource_link_id=resource_link_id, data=request.POST)
    if form.is_valid():
        theform = form.save(commit=False)
        theform.user_id = user_id
        theform.resource_link_id = resource_link_id
        theform.save()
        key = settings.STUDENT_LOCATIONS_TOOL.get('google_map_api_v3_key')
        return render(request, 'student_locations/map_view.html', {'request': request, 'api_key' : key})
    else:
        return render(request, 'student_locations/user_edit_view.html', {'request': request, 'form': form})
@login_required()
@require_http_methods(['GET'])
def table_view(request):
    """Render the table view listing every student location for this resource."""
    rows = Locations.objects.filter(
        resource_link_id=getparamfromsession(request, 'resource_link_id'),
    )
    return render(
        request,
        'student_locations/table_view.html',
        {'request': request, 'data': rows},
    )
@login_required()
@require_http_methods(['GET'])
def markers_class_xml(request):
    """Render the XML feed of student locations consumed by the Google map."""
    resource_link_id = getparamfromsession(request, 'resource_link_id')
    students = Locations.objects.filter(resource_link_id=resource_link_id)
    # NOTE(review): render_to_response/context_instance were removed in
    # Django 1.10 -- this code targets an older Django.
    return render_to_response('student_locations/markers.xml',
                              {'data' : students},
                              context_instance=RequestContext(request))
@require_http_methods(['GET'])
def tool_config(request):
    """Produce the Canvas-specific XML config used to install this LTI tool."""
    scheme = 'https://' if request.is_secure() else 'http://'
    url = scheme + request.get_host() + reverse('sl:lti_launch')
    lti_tool_config = ToolConfig(
        title='Student Locations',
        launch_url=url,
        secure_launch_url=url,
    )
    nav_params = {
        'enabled': 'true',
        'text': 'Student Locations',
    }
    lti_tool_config.set_ext_param('canvas.instructure.com', 'privacy_level', 'public')
    lti_tool_config.set_ext_param('canvas.instructure.com', 'course_navigation', nav_params)
    lti_tool_config.description = 'This LTI tool facilitates the display of Student Locations.'
    return HttpResponse(lti_tool_config.to_xml(), content_type='text/xml', status=200)
| true
| true
|
1c47e8a94158577824e18e7417db7b1bfc6b3d7a
| 3,111
|
py
|
Python
|
sigeco/settings.py
|
edusantana/sigeco
|
5e9e612e2721b170d770b73a36df4b92c94bed6f
|
[
"MIT"
] | null | null | null |
sigeco/settings.py
|
edusantana/sigeco
|
5e9e612e2721b170d770b73a36df4b92c94bed6f
|
[
"MIT"
] | null | null | null |
sigeco/settings.py
|
edusantana/sigeco
|
5e9e612e2721b170d770b73a36df4b92c94bed6f
|
[
"MIT"
] | null | null | null |
"""
Django settings for sigeco project.
Generated by 'django-admin startproject' using Django 2.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9l%mu66ff)sre+8$(j%&3%(3an+i+-di$o(v^wk0)y)!i92jit'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sigeco.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'sigeco.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Recife'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| 25.292683
| 91
| 0.693346
|
# Django settings module for the sigeco project (generated for Django 2.0.x).
import os

# Base directory of the repository; used to build project-relative paths.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# NOTE(review): secret key hard-coded and DEBUG enabled -- development-only
# configuration; move the key to the environment before deploying.
SECRET_KEY = '9l%mu66ff)sre+8$(j%&3%(3an+i+-di$o(v^wk0)y)!i92jit'
DEBUG = True
ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'core'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'sigeco.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'sigeco.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Recife'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| true
| true
|
1c47e8b8f9a966f714a9afcbcc891fafd92fb95b
| 1,232
|
py
|
Python
|
tests/models.py
|
viralogic/py-queryable
|
12034ee04eaa176676df84cb49665c6a9f741f01
|
[
"MIT"
] | null | null | null |
tests/models.py
|
viralogic/py-queryable
|
12034ee04eaa176676df84cb49665c6a9f741f01
|
[
"MIT"
] | 1
|
2018-10-04T22:13:18.000Z
|
2018-10-18T04:01:59.000Z
|
tests/models.py
|
viralogic/py-queryable
|
12034ee04eaa176676df84cb49665c6a9f741f01
|
[
"MIT"
] | null | null | null |
# Fixture models used by the py-queryable test suite.
# NOTE: this module targets Python 2 -- the ``unicode`` builtin does not
# exist on Python 3.
from py_queryable import Model
from py_queryable import Column, PrimaryKey, ForeignKey


# Model with an explicitly named integer column.
class StubModel(Model):
    __table_name__ = u'test_table'
    test_int_column = Column(int, 'int_column')


# Same as StubModel but with the column name defaulted.
class StubModel2(Model):
    __table_name__ = u'test_table'
    test_int_column = Column(int)


class StubPrimary(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(int, 'int_pk')


class StubPrimaryString(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(unicode, 'unicode_pk')


# Exercises the is_unique column flag.
class StubIntUnique(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(int)
    test_unique = Column(int, 'int_column', is_unique=True)


# Non-nullable foreign key pointing at StubPrimary.
class StubForeignKey(Model):
    __table_name__ = u"foreign_key_table"
    test_pk = PrimaryKey(int, 'int_pk')
    test_fk = ForeignKey(StubPrimary, 'test_fk', is_nullable=False)


class StubUpdateModel(Model):
    __table_name__ = u"test_update_table"
    key = PrimaryKey(int, 'key_column')
    update_col = Column(int, 'update_column')


class Student(Model):
    __table_name__ = u"student"
    student_id = PrimaryKey(int, "student_id")
    first_name = Column(unicode, "first_name")
    last_name = Column(unicode, "last_name")
    gpa = Column(int, "gpa")
| 25.142857
| 67
| 0.722403
|
# Stub models for exercising py-queryable's column/key declarations.
# NOTE: Python 2 only (``unicode`` is used below).
from py_queryable import Model
from py_queryable import Column, PrimaryKey, ForeignKey


class StubModel(Model):
    __table_name__ = u'test_table'
    test_int_column = Column(int, 'int_column')


class StubModel2(Model):
    # Column name omitted: exercises the default-naming path.
    __table_name__ = u'test_table'
    test_int_column = Column(int)


class StubPrimary(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(int, 'int_pk')


class StubPrimaryString(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(unicode, 'unicode_pk')


class StubIntUnique(Model):
    __table_name__ = u"test_table"
    test_pk = PrimaryKey(int)
    test_unique = Column(int, 'int_column', is_unique=True)


class StubForeignKey(Model):
    __table_name__ = u"foreign_key_table"
    test_pk = PrimaryKey(int, 'int_pk')
    test_fk = ForeignKey(StubPrimary, 'test_fk', is_nullable=False)


class StubUpdateModel(Model):
    __table_name__ = u"test_update_table"
    key = PrimaryKey(int, 'key_column')
    update_col = Column(int, 'update_column')


class Student(Model):
    __table_name__ = u"student"
    student_id = PrimaryKey(int, "student_id")
    first_name = Column(unicode, "first_name")
    last_name = Column(unicode, "last_name")
    gpa = Column(int, "gpa")
| true
| true
|
1c47e91ab2f55e136165d27492cfb7b3e29d06ea
| 2,056
|
py
|
Python
|
scripts/_helpers.py
|
fneum/core-tso-data
|
480b1ea7524adc5d5425165668989fc9dd412e90
|
[
"MIT"
] | null | null | null |
scripts/_helpers.py
|
fneum/core-tso-data
|
480b1ea7524adc5d5425165668989fc9dd412e90
|
[
"MIT"
] | 1
|
2022-02-07T14:41:52.000Z
|
2022-02-07T14:41:52.000Z
|
scripts/_helpers.py
|
fneum/core-tso-data
|
480b1ea7524adc5d5425165668989fc9dd412e90
|
[
"MIT"
] | 4
|
2022-01-12T17:09:16.000Z
|
2022-02-07T14:36:21.000Z
|
# from https://github.com/PyPSA/pypsa-eur/blob/master/scripts/_helpers.py
import pypsa
from pathlib import Path
def mock_snakemake(rulename, **wildcards):
    """
    This function is expected to be executed from the 'scripts'-directory of
    the snakemake project. It returns a snakemake.script.Snakemake object,
    based on the Snakefile.

    If a rule has wildcards, you have to specify them in **wildcards.

    Parameters
    ----------
    rulename: str
        name of the rule for which the snakemake object should be generated
    **wildcards:
        keyword arguments fixing the wildcards. Only necessary if wildcards are
        needed.

    Raises
    ------
    FileNotFoundError
        If no Snakefile is found in the project root directory.
    """
    import snakemake as sm
    import os
    from pypsa.descriptors import Dict
    from snakemake.script import Snakemake

    script_dir = Path(__file__).parent.resolve()
    assert (
        Path.cwd().resolve() == script_dir
    ), f"mock_snakemake has to be run from the repository scripts directory {script_dir}"
    os.chdir(script_dir.parent)
    # Locate the project Snakefile among snakemake's accepted filenames.
    for p in sm.SNAKEFILE_CHOICES:
        if os.path.exists(p):
            snakefile = p
            break
    else:
        # Fix: previously `snakefile` was left unbound when nothing matched,
        # causing a confusing NameError below. Restore cwd and fail loudly.
        os.chdir(script_dir)
        raise FileNotFoundError(f"no Snakefile found in {script_dir.parent}")
    workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
    workflow.include(snakefile)
    workflow.global_resources = {}
    rule = workflow.get_rule(rulename)
    dag = sm.dag.DAG(workflow, rules=[rule])
    wc = Dict(wildcards)
    job = sm.jobs.Job(rule, dag, wc)

    def make_accessable(*ios):
        # Convert relative I/O paths to absolute so they remain valid after
        # we chdir back to the scripts directory.
        for io in ios:
            for i in range(len(io)):
                io[i] = os.path.abspath(io[i])

    make_accessable(job.input, job.output, job.log)
    snakemake = Snakemake(
        job.input,
        job.output,
        job.params,
        job.wildcards,
        job.threads,
        job.resources,
        job.log,
        job.dag.workflow.config,
        job.rule.name,
        None,
    )
    # create log and output dir if not existent
    for path in list(snakemake.log) + list(snakemake.output):
        Path(path).parent.mkdir(parents=True, exist_ok=True)
    os.chdir(script_dir)
    return snakemake
| 30.686567
| 89
| 0.646401
|
import pypsa
from pathlib import Path
def mock_snakemake(rulename, **wildcards):
    """Build a ``snakemake.script.Snakemake`` object for rule *rulename*.

    Must be executed from the project's ``scripts`` directory (asserted
    below). Wildcards required by the rule are passed via ``**wildcards``.
    """
    import snakemake as sm
    import os
    from pypsa.descriptors import Dict
    from snakemake.script import Snakemake
    script_dir = Path(__file__).parent.resolve()
    assert (
        Path.cwd().resolve() == script_dir
    ), f"mock_snakemake has to be run from the repository scripts directory {script_dir}"
    # Work from the project root so snakemake resolves paths correctly.
    os.chdir(script_dir.parent)
    # NOTE(review): if no Snakefile matches, `snakefile` stays unbound and the
    # Workflow() call below raises a confusing NameError -- confirm intended.
    for p in sm.SNAKEFILE_CHOICES:
        if os.path.exists(p):
            snakefile = p
            break
    workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
    workflow.include(snakefile)
    workflow.global_resources = {}
    rule = workflow.get_rule(rulename)
    dag = sm.dag.DAG(workflow, rules=[rule])
    wc = Dict(wildcards)
    job = sm.jobs.Job(rule, dag, wc)
    def make_accessable(*ios):
        # Make job I/O paths absolute so they survive the chdir back below.
        for io in ios:
            for i in range(len(io)):
                io[i] = os.path.abspath(io[i])
    make_accessable(job.input, job.output, job.log)
    snakemake = Snakemake(
        job.input,
        job.output,
        job.params,
        job.wildcards,
        job.threads,
        job.resources,
        job.log,
        job.dag.workflow.config,
        job.rule.name,
        None,
    )
    # Pre-create log/output directories so scripts can write immediately.
    for path in list(snakemake.log) + list(snakemake.output):
        Path(path).parent.mkdir(parents=True, exist_ok=True)
    os.chdir(script_dir)
    return snakemake
| true
| true
|
1c47e955191e482356b4dccd864bc429173b5548
| 4,999
|
py
|
Python
|
frontend/app/export.py
|
emsch/femida
|
48de931c5d563e7fd354e6593a702f2397fd566f
|
[
"Apache-2.0"
] | null | null | null |
frontend/app/export.py
|
emsch/femida
|
48de931c5d563e7fd354e6593a702f2397fd566f
|
[
"Apache-2.0"
] | 52
|
2018-09-16T13:08:09.000Z
|
2020-06-07T09:30:27.000Z
|
frontend/app/export.py
|
emsch/femida
|
48de931c5d563e7fd354e6593a702f2397fd566f
|
[
"Apache-2.0"
] | 10
|
2018-09-26T20:06:41.000Z
|
2020-06-16T17:35:59.000Z
|
#!/usr/bin/env python3
import datetime
import json
import xlsxwriter
from io import BytesIO
from flask import (
Blueprint,
send_file
)
from bson import json_util
from collections import Counter
# Flask blueprint exposing the /export endpoint.
mod_export = Blueprint('export', __name__)

from database import mongo  # noqa

# MongoDB collections used by this module.
pdfs = mongo.db.pdfs
answers = mongo.db.answers
class Col:
    """Auto-incrementing column cursor: each call yields the next column index."""

    def __init__(self, start_from=0):
        # `i` is the index that the next call will return.
        self.i = start_from

    def __call__(self):
        """Return the current column index and advance the cursor by one."""
        result = self.i
        self.i = result + 1
        return result

    def current(self):
        """Peek at the next column index without advancing the cursor."""
        return self.i
class Question:
    """Collects recognized options and manual corrections for one question.

    Tracks whether manual updates occurred (``yellow`` highlight) and whether
    they contradict each other (``red`` highlight).
    """

    def __init__(
        self, id_=None, option=None, banned_options="",
        yellow=None, red=None,
    ):
        self.id = id_
        self.banned_options = banned_options
        # Seed the tally with the initially recognized option, if any.
        self.options = Counter() if option is None else Counter([self.clean_option(option)])
        self.has_updates = False
        self.has_contradicting_updates = False
        self.updates = []
        # Cell formats used to flag updated / contradicting answers.
        self.yellow = yellow
        self.red = red

    def clean_option(self, option):
        """Strip every banned character from *option* and return the result."""
        for banned in self.banned_options:
            option = option.replace(banned, "")
        return option

    def update(self, option):
        """Record a manual correction for this question."""
        # A second, different manual value means the checkers disagree.
        if self.updates and option not in self.updates:
            self.has_contradicting_updates = True
        self.updates.append(option)
        self.options[self.clean_option(option)] += 1
        if len(self.options) > 1:
            self.has_updates = True

    def get_res_style(self):
        """Return ``(answer, cell_format)`` for writing into the worksheet."""
        res = self.get_res()
        if self.has_contradicting_updates:
            return res, self.red
        return res, (self.yellow if self.has_updates else None)

    def get_res(self):
        """Return the effective answer: last manual update, else the mode."""
        if not self.options:
            return None
        if self.updates:
            return self.updates[-1]
        return self.options.most_common(1)[0][0]
@mod_export.route('/export')
def export():
    """Export every answer document from MongoDB as an XLSX spreadsheet.

    One row per answer; personal fields and per-question answers are merged
    via Question, with yellow/red highlighting for updated/contradicting
    values. Returns the workbook as a file download.
    """
    output = BytesIO()
    # Create an new Excel file and add a worksheet.
    workbook = xlsxwriter.Workbook(output)
    worksheet = workbook.add_worksheet()
    red = workbook.add_format({'bold': True, 'bg_color': 'red'})
    yellow = workbook.add_format({'bold': False, 'bg_color': 'yellow'})
    bold = workbook.add_format({'bold': True})
    header = ["№", "status", "surname", "name", "patronumic", "class", "type",
              "variant", "requested_manual", "manual_checks", "img_test_form",
              "img_fio", "UUID"]
    header.extend([str(i) for i in range(1, 41)])
    header.extend(["raw_json"])
    for i, v in enumerate(header):
        worksheet.write(0, i, v, bold)
    for row, r in enumerate(answers.find()):
        col = Col()
        try:
            worksheet.write(1+row, col(), 1+row)
            worksheet.write(1+row, col(), r.get('status', ""))
            # Full name fields (surname / name / patronymic) and class info.
            for field in ['surname', 'name', 'patronymic', 'class', 'type', 'variant']:
                question = Question(field, yellow=yellow, red=red)
                for personal in r['personal']:
                    question.update(personal.get(field, ""))
                worksheet.write(1+row, col(), *question.get_res_style())
            # Counters of manual-review requests/checks.
            requested_manual = len(r.get('requested_manual', []))
            if requested_manual > 0:
                worksheet.write(1+row, col(), requested_manual, red)
            else:
                worksheet.write(1+row, col(), requested_manual)
            manual_checks = len(r.get('manual_checks', []))
            worksheet.write(1+row, col(), manual_checks)
            # Image links.
            worksheet.write(1+row, col(), 'http://femida.emsch.ru' + r.get('img_test_form', ""))
            worksheet.write(1+row, col(), 'http://femida.emsch.ru' + r.get('img_fio', ""))
            worksheet.write(1+row, col(), r.get('UUID', ""))
            # Per-question answers (columns for questions 1..40).
            start = col.current()
            for q in range(1, 41):
                question = Question(
                    q, r.get('test_results', {}).get(str(q), ""), "F",
                    yellow, red
                )
                for update in r.get('test_updates', []):
                    if str(q) in update['updates']:
                        question.update(update['updates'][str(q)])
                worksheet.write(1+row, start+q-1, *question.get_res_style())
                col()
            worksheet.write(1+row, col(), json.dumps(r, default=json_util.default))
        except Exception as e:
            # Best-effort: mark the row instead of aborting the whole export.
            worksheet.write(1+row, 0, 'ERROR OCCURED: ' + str(e))
    workbook.close()
    output.seek(0)
    # finally return the file
    # NOTE(review): `attachment_filename` was renamed to `download_name` in
    # Flask 2.0 -- this code targets an older Flask.
    attachment_filename = 'femida_%s.xlsx' % datetime.datetime.now().isoformat()[:19]
    return send_file(output, attachment_filename=attachment_filename, as_attachment=True)
| 32.888158
| 96
| 0.561312
|
import datetime
import json
import xlsxwriter
from io import BytesIO
from flask import (
Blueprint,
send_file
)
from bson import json_util
from collections import Counter
# Blueprint for the export endpoint.
mod_export = Blueprint('export', __name__)

from database import mongo

# Collections read by export().
pdfs = mongo.db.pdfs
answers = mongo.db.answers
class Col:
    """Column cursor: calling the instance returns successive column indexes."""

    def __init__(self, start_from=0):
        # Index that the next call will return.
        self.i = start_from

    def __call__(self):
        # Return the current index, then advance.
        val = self.i
        self.i += 1
        return val

    def current(self):
        # Peek at the next index without advancing.
        return self.i
class Question:
    """Aggregates recognized options and manual updates for a single question."""

    def __init__(
        self, id_=None, option=None, banned_options="",
        yellow=None, red=None,
    ):
        self.id = id_
        self.banned_options = banned_options
        # Tally of seen options, seeded with the recognized one (if any).
        if option is None:
            self.options = Counter()
        else:
            self.options = Counter([self.clean_option(option)])
        self.has_updates = False
        self.has_contradicting_updates = False
        self.updates = []
        # Worksheet cell formats for the updated / contradicting states.
        self.yellow = yellow
        self.red = red

    def clean_option(self, option):
        """Remove every banned character from *option*."""
        for i in self.banned_options:
            option = option.replace(i, "")
        return option

    def update(self, option):
        """Record a manual correction; flag disagreements between checkers."""
        if len(self.updates) > 0 and option not in self.updates:
            self.has_contradicting_updates = True
        self.updates.append(option)
        self.options.update([self.clean_option(option)])
        if len(self.options) > 1:
            self.has_updates = True

    def get_res_style(self):
        """Return ``(answer, cell_format)``: red if contradicting, yellow if updated."""
        res = self.get_res()
        if not self.has_contradicting_updates:
            if self.has_updates:
                return res, self.yellow
            else:
                return res, None
        else:
            return res, self.red

    def get_res(self):
        """Effective answer: last manual update, else the most common option."""
        if len(self.options) == 0:
            return None
        elif len(self.updates) > 0:
            return self.updates[-1]
        else:
            return self.options.most_common(1)[0][0]
@mod_export.route('/export')
def export():
    """Dump all answer documents to an XLSX file and return it as a download."""
    output = BytesIO()
    workbook = xlsxwriter.Workbook(output)
    worksheet = workbook.add_worksheet()
    # Highlight formats: red = contradicting manual edits, yellow = edited.
    red = workbook.add_format({'bold': True, 'bg_color': 'red'})
    yellow = workbook.add_format({'bold': False, 'bg_color': 'yellow'})
    bold = workbook.add_format({'bold': True})
    header = ["№", "status", "surname", "name", "patronumic", "class", "type",
              "variant", "requested_manual", "manual_checks", "img_test_form",
              "img_fio", "UUID"]
    header.extend([str(i) for i in range(1, 41)])
    header.extend(["raw_json"])
    for i, v in enumerate(header):
        worksheet.write(0, i, v, bold)
    for row, r in enumerate(answers.find()):
        col = Col()
        try:
            worksheet.write(1+row, col(), 1+row)
            worksheet.write(1+row, col(), r.get('status', ""))
            # Personal fields, merged across all recognition passes.
            for field in ['surname', 'name', 'patronymic', 'class', 'type', 'variant']:
                question = Question(field, yellow=yellow, red=red)
                for personal in r['personal']:
                    question.update(personal.get(field, ""))
                worksheet.write(1+row, col(), *question.get_res_style())
            # Manual-review counters; highlight pending requests in red.
            requested_manual = len(r.get('requested_manual', []))
            if requested_manual > 0:
                worksheet.write(1+row, col(), requested_manual, red)
            else:
                worksheet.write(1+row, col(), requested_manual)
            manual_checks = len(r.get('manual_checks', []))
            worksheet.write(1+row, col(), manual_checks)
            # Links to the scanned images.
            worksheet.write(1+row, col(), 'http://femida.emsch.ru' + r.get('img_test_form', ""))
            worksheet.write(1+row, col(), 'http://femida.emsch.ru' + r.get('img_fio', ""))
            worksheet.write(1+row, col(), r.get('UUID', ""))
            # Questions 1..40, merged with any manual updates.
            start = col.current()
            for q in range(1, 41):
                question = Question(
                    q, r.get('test_results', {}).get(str(q), ""), "F",
                    yellow, red
                )
                for update in r.get('test_updates', []):
                    if str(q) in update['updates']:
                        question.update(update['updates'][str(q)])
                worksheet.write(1+row, start+q-1, *question.get_res_style())
                col()
            worksheet.write(1+row, col(), json.dumps(r, default=json_util.default))
        except Exception as e:
            # Best-effort export: record the failure in the row, keep going.
            worksheet.write(1+row, 0, 'ERROR OCCURED: ' + str(e))
    workbook.close()
    output.seek(0)
    attachment_filename = 'femida_%s.xlsx' % datetime.datetime.now().isoformat()[:19]
    return send_file(output, attachment_filename=attachment_filename, as_attachment=True)
| true
| true
|
1c47ea8507d115500d6e829758a110c6bab5ce7d
| 4,322
|
py
|
Python
|
anima/env/fusion/render_merger.py
|
MehmetErer/anima
|
f92ae599b5a4c181fc8e131a9ccdde537e635303
|
[
"MIT"
] | 101
|
2015-02-08T22:20:11.000Z
|
2022-03-21T18:56:42.000Z
|
anima/env/fusion/render_merger.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 23
|
2016-11-30T08:33:21.000Z
|
2021-01-26T12:11:12.000Z
|
anima/env/fusion/render_merger.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 27
|
2015-01-03T06:49:45.000Z
|
2021-12-28T03:30:54.000Z
|
# -*- coding: utf-8 -*-
"""Merges sliced renders in to one big plate
"""
try:
# for Fusion 6 and 7
import PeyeonScript as bmf
except ImportError:
# for Fusion 8+
import BlackmagicFusion as bmf
from anima.env.fusion.utils import NodeUtils
class RenderMerger(object):
    """A tool to merge sliced renders back into one big plate.

    Builds a Fusion node tree: one Loader per slice, chained Merge nodes
    over a black background, each slice offset to its tile position.
    """

    def __init__(self, path="", slices_in_x=5, slices_in_y=5,
                 plate_width=0, plate_height=0):
        """
        :param str path: Path of the sliced render sequence.
        :param int slices_in_x: Number of slices along the horizontal axis.
        :param int slices_in_y: Number of slices along the vertical axis.
        :param int plate_width: Width of the resulting plate.
        :param int plate_height: Height of the resulting plate.
        """
        self.fusion = bmf.scriptapp("Fusion")
        self.comp = self.fusion.GetCurrentComp()
        self.fusion_version = float(
            self.fusion.GetAttrs("FUSIONS_Version").split(".")[0])
        self.path = path
        self.slices_in_x = slices_in_x
        self.slices_in_y = slices_in_y
        # Bug fix: these two assignments were swapped in the original
        # (plate_width stored the plate_height argument and vice versa).
        self.plate_width = plate_width
        self.plate_height = plate_height

    def ui(self):
        """Shows the slice-selection dialog, stores the chosen sequence and
        slice counts, then runs the merge.
        """
        result = self.comp.AskUser(
            'Chose Slices',
            {
                1: {
                    1: 'Slice Sequence',
                    2: 'FileBrowse',
                    'Save': False
                },
                2: {
                    1: 'Slice In Width',
                    2: 'Slider',
                    'Min': 1,
                    'Max': 10,
                    'Default': 5,
                    'Integer': True,
                },
                3: {
                    1: 'Slice In Height',
                    2: 'Slider',
                    'Min': 1,
                    'Max': 10,
                    'Default': 5,
                    'Integer': True,
                }
            }
        )
        self.path = result['Slice Sequence']
        self.slices_in_x = int(result['Slice In Width'])
        self.slices_in_y = int(result['Slice In Height'])
        self.do_merge()

    def calculate_total_width_height(self):
        """Calculates the total width and height of the resulting plate by
        probing the size of a single slice.

        :return (int, int): Width and height of the resulting plate.
        """
        self.comp.Lock()
        loader = self.comp.Loader()
        self.comp.Unlock()
        NodeUtils.set_node_attr(loader, 'Clip', self.path)
        # Loader tool input indexes: 10 = clip path, 15/16 = clip time
        # start/end -- presumably stable across supported Fusion versions;
        # TODO confirm for new releases.
        loader.GetInputList()[10][0] = self.path
        loader.GetInputList()[15][0] = 0
        loader.GetInputList()[16][0] = 0
        attrs = loader.GetAttrs()
        # One slice's size times the slice counts gives the plate size.
        plate_width = attrs['TOOLIT_Clip_Width'][1] * self.slices_in_x
        plate_height = attrs['TOOLIT_Clip_Height'][1] * self.slices_in_y
        # NOTE(review): the probe Loader is left in the comp; the original
        # had `loader.Delete()` commented out -- confirm whether deleting
        # it is safe before re-enabling.
        return plate_width, plate_height

    def do_merge(self):
        """Builds the node tree that merges all slices into one plate."""
        width, height = self.calculate_total_width_height()

        # Black, fully transparent background at the final plate size.
        bg = self.comp.Background()
        NodeUtils.set_node_attr(bg, "Width", width)
        NodeUtils.set_node_attr(bg, "Height", height)
        NodeUtils.set_node_attr(bg, "TopLeftRed", 0)
        NodeUtils.set_node_attr(bg, "TopLeftGreen", 0)
        NodeUtils.set_node_attr(bg, "TopLeftBlue", 0)
        NodeUtils.set_node_attr(bg, "TopLeftAlpha", 0)

        prev_merge = bg
        t = 0  # frame index: slice t of the sequence
        self.comp.Lock()
        for i in range(self.slices_in_y):      # rows
            for j in range(self.slices_in_x):  # columns
                loader = self.comp.Loader()
                NodeUtils.set_node_attr(loader, 'Clip', self.path)
                NodeUtils.set_node_attr(loader, "ClipTimeStart", t)
                NodeUtils.set_node_attr(loader, "ClipTimeEnd", t)

                merge = self.comp.Merge()
                # Center of tile (j, i) in normalized [0, 1] coordinates.
                h_offset = 0.5 / self.slices_in_x + j * 1.0 / self.slices_in_x
                v_offset = 0.5 / self.slices_in_y + i * 1.0 / self.slices_in_y
                NodeUtils.set_node_attr(merge, "Center", {
                    1.0: h_offset,
                    2.0: v_offset,
                    3.0: 0.0
                })

                # Chain: previous result behind, current slice on top.
                merge.Background = prev_merge
                merge.Foreground = loader
                prev_merge = merge
                t += 1
        self.comp.Unlock()
| 30.013889
| 93
| 0.519435
|
try:
import PeyeonScript as bmf
except ImportError:
import BlackmagicFusion as bmf
from anima.env.fusion.utils import NodeUtils
class RenderMerger(object):
def __init__(self, path="", slices_in_x=5, slices_in_y=5, plate_width=0, plate_height=0):
self.fusion = bmf.scriptapp("Fusion")
self.comp = self.fusion.GetCurrentComp()
self.fusion_version = float(self.fusion.GetAttrs("FUSIONS_Version").split(".")[0])
self.path = path
self.slices_in_x = slices_in_x
self.slices_in_y = slices_in_y
self.plate_width = plate_height
self.plate_height = plate_width
def ui(self):
result = self.comp.AskUser(
'Chose Slices',
{
1: {
1: 'Slice Sequence',
2: 'FileBrowse',
'Save': False
},
2: {
1: 'Slice In Width',
2: 'Slider',
'Min': 1,
'Max': 10,
'Default': 5,
'Integer': True,
},
3: {
1: 'Slice In Height',
2: 'Slider',
'Min': 1,
'Max': 10,
'Default': 5,
'Integer': True,
}
}
)
self.path = result['Slice Sequence']
self.slices_in_x = int(result['Slice In Width'])
self.slices_in_y = int(result['Slice In Height'])
self.do_merge()
def calculate_total_width_height(self):
self.comp.Lock()
loader = self.comp.Loader()
self.comp.Unlock()
NodeUtils.set_node_attr(loader, 'Clip', self.path)
loader.GetInputList()[10][0] = self.path
loader.GetInputList()[15][0] = 0
loader.GetInputList()[16][0] = 0
attrs = loader.GetAttrs()
plate_width = attrs['TOOLIT_Clip_Width'][1] * self.slices_in_x
plate_height = attrs['TOOLIT_Clip_Height'][1] * self.slices_in_y
return plate_width, plate_height
def do_merge(self):
width, height = self.calculate_total_width_height()
bg = self.comp.Background()
NodeUtils.set_node_attr(bg, "Width", width)
NodeUtils.set_node_attr(bg, "Height", height)
NodeUtils.set_node_attr(bg, "TopLeftRed", 0)
NodeUtils.set_node_attr(bg, "TopLeftGreen", 0)
NodeUtils.set_node_attr(bg, "TopLeftBlue", 0)
NodeUtils.set_node_attr(bg, "TopLeftAlpha", 0)
prev_merge = bg
t = 0
self.comp.Lock()
for i in range(self.slices_in_y):
for j in range(self.slices_in_x):
loader = self.comp.Loader()
NodeUtils.set_node_attr(loader, 'Clip', self.path)
NodeUtils.set_node_attr(loader, "ClipTimeStart", t)
NodeUtils.set_node_attr(loader, "ClipTimeEnd", t)
merge = self.comp.Merge()
h_offset = 0.5 / self.slices_in_x + j * 1.0 / self.slices_in_x
v_offset = 0.5 / self.slices_in_y + i * 1.0 / self.slices_in_y
NodeUtils.set_node_attr(merge, "Center", {
1.0: h_offset,
2.0: v_offset,
3.0: 0.0
})
merge.Background = prev_merge
merge.Foreground = loader
prev_merge = merge
t += 1
self.comp.Unlock()
| true
| true
|
1c47ea87396ab3c1281919e883bf6c4c9a9cdcd1
| 757
|
py
|
Python
|
mysite/polls/admin.py
|
thomasbtf/hello-django
|
37c5188667d6eeb2043e3bdd45294dd16e754507
|
[
"MIT"
] | null | null | null |
mysite/polls/admin.py
|
thomasbtf/hello-django
|
37c5188667d6eeb2043e3bdd45294dd16e754507
|
[
"MIT"
] | null | null | null |
mysite/polls/admin.py
|
thomasbtf/hello-django
|
37c5188667d6eeb2043e3bdd45294dd16e754507
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Choice, Question
class ChoiceInline(admin.TabularInline):
    # Renders Choice rows inline (tabular layout) on the parent admin page.
    model = Choice
    extra = 3  # number of blank extra Choice forms shown by default
class QuestionAdmin(admin.ModelAdmin):
    """Admin configuration for Question: grouped fields (pub_date
    collapsed), inline Choice editing, date filtering and text search."""
    fieldsets = [
        (None, {'fields': ['question_text']}),
        ('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
    ]
    inlines = [ChoiceInline]
    list_filter = ['pub_date']
    search_fields = ['question_text']

    @admin.display(
        boolean=True,
        ordering='pub_date',
        description='Published recently?',
    )
    def was_published_recently(self):
        """True if pub_date falls within the last 24 hours."""
        # Bug fix: `datetime` and `timezone` were never imported in this
        # module, so calling this method raised NameError. Imported locally
        # to keep the module-level import block untouched.
        import datetime
        from django.utils import timezone

        # NOTE(review): this method reads `self.pub_date`, which exists on
        # the Question model, not on a ModelAdmin -- it looks copied from
        # the model class. Confirm whether it should live on Question.
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
# Expose Question in the admin site with the customized admin options.
admin.site.register(Question, QuestionAdmin)
| 26.103448
| 80
| 0.630119
|
from django.contrib import admin
from .models import Choice, Question
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['question_text']}),
('Date information', {'fields': ['pub_date'], 'classes': ['collapse']}),
]
inlines = [ChoiceInline]
list_filter = ['pub_date']
search_fields = ['question_text']
@admin.display(
boolean=True,
ordering='pub_date',
description='Published recently?',
)
def was_published_recently(self):
now = timezone.now()
return now - datetime.timedelta(days=1) <= self.pub_date <= now
admin.site.register(Question, QuestionAdmin)
| true
| true
|
1c47eaf899d1f4ffce8bd8e362130e85c306d2bf
| 1,239
|
py
|
Python
|
jip/cli/jip_cancel.py
|
VDBWRAIR/pyjip
|
dc147afebbabd550828fa51cc052db4aa07c5d3b
|
[
"BSD-3-Clause"
] | 18
|
2015-05-08T06:39:09.000Z
|
2020-11-30T10:51:36.000Z
|
jip/cli/jip_cancel.py
|
VDBWRAIR/pyjip
|
dc147afebbabd550828fa51cc052db4aa07c5d3b
|
[
"BSD-3-Clause"
] | 9
|
2015-01-02T09:55:53.000Z
|
2016-02-03T18:31:10.000Z
|
jip/cli/jip_cancel.py
|
VDBWRAIR/pyjip
|
dc147afebbabd550828fa51cc052db4aa07c5d3b
|
[
"BSD-3-Clause"
] | 5
|
2016-02-01T16:52:36.000Z
|
2021-03-10T12:08:39.000Z
|
#!/usr/bin/env python
"""
Cancel jip jobs
Usage:
jip-cancel [-j <id>...] [-J <cid>...] [--clean]
jip-cancel [--help|-h]
Options:
--clean Remove the logfiles
-j, --job <id>... List jobs with specified id
-J, --cluster-job <cid>... List jobs with specified cluster id
-h --help Show this help message
"""
import jip.db
import jip.jobs
from . import parse_args, parse_job_ids, confirm
import sys
def main():
    """Resolves the jobs targeted by the CLI options, expands them to their
    full pipelines and cancels them after user confirmation.

    Job ids / cluster ids come from the docopt options parsed out of the
    module docstring; ``--clean`` additionally removes the log files.
    """
    args = parse_args(__doc__, options_first=False)
    job_ids, cluster_ids = parse_job_ids(args)
    jobs = list(jip.db.query(
        job_ids=job_ids, cluster_ids=cluster_ids, archived=None))
    if not jobs:
        return

    # Expand the selection to the full pipelines the jobs belong to.
    jobs = jip.jobs.resolve_jobs(jobs)

    if confirm("Are you sure you want "
               "to cancel %d jobs" % len(jobs),
               False):
        # Fix: the original used the Python-2-only `print >>sys.stderr, ...`
        # statement; sys.stderr.write works under both Python 2 and 3.
        sys.stderr.write("Cancelling %s jobs\n" % len(jobs))
        for job in jobs:
            if jip.jobs.cancel(job, clean_logs=args['--clean'],
                               save=True, cancel_children=False):
                sys.stderr.write("Canceled %s\n" % job.id)
# Allow running this module directly as the jip-cancel script.
if __name__ == "__main__":
    main()
| 26.934783
| 67
| 0.560936
|
import jip.db
import jip.jobs
from . import parse_args, parse_job_ids, confirm
import sys
def main():
args = parse_args(__doc__, options_first=False)
job_ids, cluster_ids = parse_job_ids(args)
jobs = jip.db.query(job_ids=job_ids, cluster_ids=cluster_ids,
archived=None)
jobs = list(jobs)
if len(jobs) == 0:
return
jobs = jip.jobs.resolve_jobs(jobs)
if confirm("Are you sure you want "
"to cancel %d jobs" % len(jobs),
False):
print >>sys.stderr, "Cancelling %s jobs" % len(jobs)
for job in jobs:
if jip.jobs.cancel(job, clean_logs=args['--clean'],
save=True, cancel_children=False):
print >>sys.stderr, "Canceled %s" % job.id
if __name__ == "__main__":
main()
| true
| true
|
1c47eaffe4e7767186947540b4dc6ce1552877e7
| 311
|
py
|
Python
|
tests/conftest.py
|
valr/flask-webhook
|
864a86ad645a958fa7eee9cc1622ca84a79e5801
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
valr/flask-webhook
|
864a86ad645a958fa7eee9cc1622ca84a79e5801
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
valr/flask-webhook
|
864a86ad645a958fa7eee9cc1622ca84a79e5801
|
[
"MIT"
] | null | null | null |
import os
import pytest
from application import create_application
@pytest.fixture
def application():
    """Flask application in testing mode, built from the INSTANCE_PATH
    environment variable."""
    app = create_application(os.environ.get("INSTANCE_PATH"))
    app.testing = True
    return app
@pytest.fixture
def client(application):
    """Flask test client bound to the `application` fixture."""
    return application.test_client()
| 16.368421
| 69
| 0.771704
|
import os
import pytest
from application import create_application
@pytest.fixture
def application():
application = create_application(os.environ.get("INSTANCE_PATH"))
application.testing = True
return application
@pytest.fixture
def client(application):
return application.test_client()
| true
| true
|
1c47ec05d04593b3dbbb896032df03930856074c
| 40,188
|
py
|
Python
|
core/domain/topic_services.py
|
sagangwee/oppia
|
c4bf0673b4d3ec30cff609109241656f71a63a82
|
[
"Apache-2.0"
] | null | null | null |
core/domain/topic_services.py
|
sagangwee/oppia
|
c4bf0673b4d3ec30cff609109241656f71a63a82
|
[
"Apache-2.0"
] | 7
|
2019-08-20T08:30:43.000Z
|
2022-02-12T18:47:57.000Z
|
core/domain/topic_services.py
|
ledriod/oppia
|
4f8f95c6689cd36f0b65672b80d98a3463b001f8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.]
"""Commands for operations on topics, and related models."""
import collections
import copy
import logging
from core.domain import role_services
from core.domain import subtopic_page_domain
from core.domain import subtopic_page_services
from core.domain import topic_domain
from core.domain import user_services
from core.platform import models
import feconf
(topic_models,) = models.Registry.import_models([models.NAMES.topic])
datastore_services = models.Registry.import_datastore_services()
memcache_services = models.Registry.import_memcache_services()
def _migrate_subtopics_to_latest_schema(versioned_subtopics):
    """Upgrades the given subtopics structure, one schema version at a time,
    until it matches feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION.

    Args:
        versioned_subtopics: dict. Contains:
            - schema_version: int. The schema version of the stored data.
            - subtopics: list(dict). The topic's subtopic dicts; mutated in
              place by each conversion step.

    Raises:
        Exception: The stored schema version is outside the supported
            range [1, feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION].
    """
    current_version = versioned_subtopics['schema_version']
    latest_version = feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION
    if current_version < 1 or current_version > latest_version:
        raise Exception(
            'Sorry, we can only process v1-v%d subtopic schemas at '
            'present.' % latest_version)

    # Apply one conversion step per missing version.
    while current_version < latest_version:
        topic_domain.Topic.update_subtopics_from_model(
            versioned_subtopics, current_version)
        current_version += 1
# Repository GET methods.
def _get_topic_memcache_key(topic_id, version=None):
"""Returns a memcache key for the topic.
Args:
topic_id: str. ID of the topic.
version: int. The version of the topic.
Returns:
str. The memcache key of the topic.
"""
if version:
return 'topic-version:%s:%s' % (topic_id, version)
else:
return 'topic:%s' % topic_id
def get_topic_from_model(topic_model):
    """Builds a Topic domain object from a topic model loaded from the
    datastore, migrating stored subtopics to the latest schema if needed.

    Args:
        topic_model: TopicModel. The topic model loaded from the datastore.

    Returns:
        Topic. A Topic domain object corresponding to the given model.
    """
    versioned_subtopics = {
        'schema_version': topic_model.subtopic_schema_version,
        'subtopics': copy.deepcopy(topic_model.subtopics)
    }
    if (topic_model.subtopic_schema_version !=
            feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION):
        _migrate_subtopics_to_latest_schema(versioned_subtopics)

    subtopics = [
        topic_domain.Subtopic.from_dict(subtopic_dict)
        for subtopic_dict in versioned_subtopics['subtopics']
    ]
    return topic_domain.Topic(
        topic_model.id, topic_model.name, topic_model.description,
        topic_model.canonical_story_ids, topic_model.additional_story_ids,
        topic_model.uncategorized_skill_ids, subtopics,
        versioned_subtopics['schema_version'],
        topic_model.next_subtopic_id, topic_model.language_code,
        topic_model.version, topic_model.created_on,
        topic_model.last_updated)
def get_all_topic_summaries():
    """Returns summaries for every topic present in the datastore.

    Returns:
        list(TopicSummary). One summary per stored topic.
    """
    return [
        get_topic_summary_from_model(summary_model)
        for summary_model in topic_models.TopicSummaryModel.get_all()
    ]
def get_all_skill_ids_assigned_to_some_topic():
    """Collects the ids of every skill that is linked to at least one topic.

    Returns:
        set(str). The ids of all skills linked to some topic.
    """
    skill_ids = set()
    for topic_model in topic_models.TopicModel.get_all():
        topic = get_topic_from_model(topic_model)
        skill_ids.update(topic.get_all_skill_ids())
    return skill_ids
def get_topic_summary_from_model(topic_summary_model):
    """Converts a TopicSummaryModel into a TopicSummary domain object.

    Args:
        topic_summary_model: TopicSummaryModel. The model to convert.

    Returns:
        TopicSummary. The equivalent domain object.
    """
    model = topic_summary_model
    return topic_domain.TopicSummary(
        model.id, model.name, model.canonical_name, model.language_code,
        model.version, model.canonical_story_count,
        model.additional_story_count, model.uncategorized_skill_count,
        model.subtopic_count, model.total_skill_count,
        model.topic_model_created_on, model.topic_model_last_updated)
def get_topic_by_id(topic_id, strict=True, version=None):
    """Fetches a topic, preferring the memcache copy over the datastore.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no topic with the given
            id exists in the datastore.
        version: int or None. The topic version to retrieve; None means
            the latest version.

    Returns:
        Topic or None. The topic with the given id, or None if it does
        not exist.
    """
    memcache_key = _get_topic_memcache_key(topic_id, version=version)
    cached = memcache_services.get_multi([memcache_key]).get(memcache_key)
    if cached is not None:
        return cached

    topic_model = topic_models.TopicModel.get(
        topic_id, strict=strict, version=version)
    if not topic_model:
        return None
    topic = get_topic_from_model(topic_model)
    # Populate the cache for subsequent reads.
    memcache_services.set_multi({memcache_key: topic})
    return topic
def get_topics_by_ids(topic_ids):
    """Returns the topics matching the given IDs.

    Args:
        topic_ids: list(str). IDs of the topics to fetch.

    Returns:
        list(Topic|None). One entry per requested id, with None in place
        of ids whose topics have been deleted.
    """
    topics = []
    for topic_model in topic_models.TopicModel.get_multi(topic_ids):
        if topic_model is None:
            topics.append(None)
        else:
            topics.append(get_topic_from_model(topic_model))
    return topics
def get_topic_by_name(topic_name):
    """Looks up a topic by its name.

    Args:
        topic_name: str. The name of the topic.

    Returns:
        Topic or None. The topic with the given name, or None if no such
        topic exists.
    """
    topic_model = topic_models.TopicModel.get_by_name(topic_name)
    if topic_model is None:
        return None
    return get_topic_from_model(topic_model)
def get_topic_summary_by_id(topic_id, strict=True):
    """Fetches the summary of the topic with the given id.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no summary with the
            given id exists in the datastore.

    Returns:
        TopicSummary or None. The summary for the given topic id, or
        None if it does not exist.
    """
    summary_model = topic_models.TopicSummaryModel.get(
        topic_id, strict=strict)
    if not summary_model:
        return None
    return get_topic_summary_from_model(summary_model)
def get_new_topic_id():
    """Generates a fresh, unused topic id.

    Returns:
        str. The newly generated topic id.
    """
    return topic_models.TopicModel.get_new_id('')
def _create_topic(committer_id, topic, commit_message, commit_cmds):
    """Persists a brand new topic, creating its rights model first.

    Args:
        committer_id: str. ID of the committer.
        topic: Topic. The topic domain object to store.
        commit_message: str. A description of changes made to the topic.
        commit_cmds: list(TopicChange). The change commands describing the
            creation.
    """
    topic.validate()
    # Rights must exist before the topic itself is committed.
    create_new_topic_rights(topic.id, committer_id)
    model = topic_models.TopicModel(
        id=topic.id,
        name=topic.name,
        canonical_name=topic.canonical_name,
        description=topic.description,
        language_code=topic.language_code,
        canonical_story_ids=topic.canonical_story_ids,
        additional_story_ids=topic.additional_story_ids,
        uncategorized_skill_ids=topic.uncategorized_skill_ids,
        subtopic_schema_version=topic.subtopic_schema_version,
        next_subtopic_id=topic.next_subtopic_id,
        subtopics=[subtopic.to_dict() for subtopic in topic.subtopics]
    )
    model.commit(
        committer_id, commit_message,
        [commit_cmd.to_dict() for commit_cmd in commit_cmds])
    topic.version += 1
    create_topic_summary(topic.id)
def save_new_topic(committer_id, topic):
    """Saves a brand new topic after checking that its name is unused.

    Args:
        committer_id: str. ID of the committer.
        topic: Topic. Topic to be saved.

    Raises:
        Exception. A topic with the same name already exists.
    """
    if get_topic_by_name(topic.name) is not None:
        raise Exception('Topic with name \'%s\' already exists' % topic.name)

    create_cmd = topic_domain.TopicChange({
        'cmd': topic_domain.CMD_CREATE_NEW,
        'name': topic.name
    })
    _create_topic(
        committer_id, topic,
        'New topic created with name \'%s\'.' % topic.name,
        [create_cmd])
def apply_change_list(topic_id, change_list):
    """Applies a changelist to a topic and returns the result. The incoming
    changelist should not have simultaneous creations and deletions of
    subtopics.

    Args:
        topic_id: str. ID of the given topic.
        change_list: list(TopicChange). A change list to be applied to the given
            topic.

    Raises:
        Exception. The incoming changelist had simultaneous creation and
            deletion of subtopics.

    Returns:
        Topic, dict, list(int), list(int), list(SubtopicPageChange).
        The modified topic object, the modified subtopic pages dict keyed
        by subtopic page id containing the updated domain objects of
        each subtopic page, a list of ids of the deleted subtopics,
        a list of ids of the newly created subtopics and a list of changes
        applied to modified subtopic pages.
    """
    topic = get_topic_by_id(topic_id)
    newly_created_subtopic_ids = []
    existing_subtopic_page_ids_to_be_modified = []
    deleted_subtopic_ids = []
    modified_subtopic_pages_list = []
    modified_subtopic_pages = {}
    modified_subtopic_change_cmds = collections.defaultdict(list)

    # First pass: collect the ids of already-existing subtopic pages that
    # this changelist modifies, so their pages can be fetched in one batch.
    for change in change_list:
        if (change.cmd ==
                subtopic_page_domain.CMD_UPDATE_SUBTOPIC_PAGE_PROPERTY):
            # subtopic ids below next_subtopic_id already exist in storage.
            if change.subtopic_id < topic.next_subtopic_id:
                existing_subtopic_page_ids_to_be_modified.append(
                    change.subtopic_id)
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                modified_subtopic_change_cmds[subtopic_page_id].append(
                    change)
    modified_subtopic_pages_list = (
        subtopic_page_services.get_subtopic_pages_with_ids(
            topic_id, existing_subtopic_page_ids_to_be_modified))
    for subtopic_page in modified_subtopic_pages_list:
        modified_subtopic_pages[subtopic_page.id] = subtopic_page
    try:
        # Second pass: apply every change command in order.
        for change in change_list:
            if change.cmd == topic_domain.CMD_ADD_SUBTOPIC:
                topic.add_subtopic(change.subtopic_id, change.title)
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                modified_subtopic_pages[subtopic_page_id] = (
                    subtopic_page_domain.SubtopicPage.create_default_subtopic_page( #pylint: disable=line-too-long
                        change.subtopic_id, topic_id)
                )
                modified_subtopic_change_cmds[subtopic_page_id].append(
                    subtopic_page_domain.SubtopicPageChange({
                        'cmd': 'create_new',
                        'topic_id': topic_id,
                        'subtopic_id': change.subtopic_id
                    }))
                newly_created_subtopic_ids.append(change.subtopic_id)
            elif change.cmd == topic_domain.CMD_DELETE_SUBTOPIC:
                topic.delete_subtopic(change.subtopic_id)
                # A subtopic created earlier in this same changelist must not
                # also be deleted by it.
                if change.subtopic_id in newly_created_subtopic_ids:
                    raise Exception(
                        'The incoming changelist had simultaneous'
                        ' creation and deletion of subtopics.')
                deleted_subtopic_ids.append(change.subtopic_id)
            elif change.cmd == topic_domain.CMD_ADD_UNCATEGORIZED_SKILL_ID:
                topic.add_uncategorized_skill_id(
                    change.new_uncategorized_skill_id)
            elif change.cmd == topic_domain.CMD_REMOVE_UNCATEGORIZED_SKILL_ID:
                topic.remove_uncategorized_skill_id(
                    change.uncategorized_skill_id)
            elif change.cmd == topic_domain.CMD_MOVE_SKILL_ID_TO_SUBTOPIC:
                topic.move_skill_id_to_subtopic(
                    change.old_subtopic_id, change.new_subtopic_id,
                    change.skill_id)
            elif change.cmd == topic_domain.CMD_REMOVE_SKILL_ID_FROM_SUBTOPIC:
                topic.remove_skill_id_from_subtopic(
                    change.subtopic_id, change.skill_id)
            elif change.cmd == topic_domain.CMD_UPDATE_TOPIC_PROPERTY:
                # Dispatch on which topic property is being updated.
                if (change.property_name ==
                        topic_domain.TOPIC_PROPERTY_NAME):
                    topic.update_name(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_DESCRIPTION):
                    topic.update_description(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_CANONICAL_STORY_IDS):
                    topic.update_canonical_story_ids(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_ADDITIONAL_STORY_IDS):
                    topic.update_additional_story_ids(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_LANGUAGE_CODE):
                    topic.update_language_code(change.new_value)
            elif (change.cmd ==
                  subtopic_page_domain.CMD_UPDATE_SUBTOPIC_PAGE_PROPERTY):
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                if ((modified_subtopic_pages[subtopic_page_id] is None) or
                        (change.subtopic_id in deleted_subtopic_ids)):
                    raise Exception(
                        'The subtopic with id %s doesn\'t exist' % (
                            change.subtopic_id))

                if (change.property_name ==
                        subtopic_page_domain.
                        SUBTOPIC_PAGE_PROPERTY_PAGE_CONTENTS_HTML):
                    modified_subtopic_pages[
                        subtopic_page_id].update_page_contents_html(
                            change.new_value)
                elif (change.property_name ==
                      subtopic_page_domain.
                      SUBTOPIC_PAGE_PROPERTY_PAGE_CONTENTS_AUDIO):
                    modified_subtopic_pages[
                        subtopic_page_id].update_page_contents_audio(
                            change.new_value)
            elif change.cmd == topic_domain.CMD_UPDATE_SUBTOPIC_PROPERTY:
                if (change.property_name ==
                        topic_domain.SUBTOPIC_PROPERTY_TITLE):
                    topic.update_subtopic_title(
                        change.subtopic_id, change.new_value)
            elif (
                    change.cmd ==
                    topic_domain.CMD_MIGRATE_SUBTOPIC_SCHEMA_TO_LATEST_VERSION):
                # Loading the topic model from the datastore into a
                # Topic domain object automatically converts it to use the
                # latest schema version. As a result, simply resaving the
                # topic is sufficient to apply the schema migration.
                continue
        return (
            topic, modified_subtopic_pages, deleted_subtopic_ids,
            newly_created_subtopic_ids, modified_subtopic_change_cmds)

    except Exception as e:
        # Log the full context of the failure before re-raising unchanged.
        logging.error(
            '%s %s %s %s' % (
                e.__class__.__name__, e, topic_id, change_list)
        )
        raise
def _save_topic(committer_id, topic, commit_message, change_list):
    """Validates a topic and commits it to the datastore, bumping the
    domain object's version number on success.

    Args:
        committer_id: str. ID of the given committer.
        topic: Topic. The topic domain object to be saved.
        commit_message: str. The commit message.
        change_list: list(TopicChange). List of changes applied to the topic.

    Raises:
        Exception: The change list is empty, or the domain object's version
            does not match the stored model's version.
    """
    if not change_list:
        raise Exception(
            'Unexpected error: received an invalid change list when trying to '
            'save topic %s: %s' % (topic.id, change_list))
    topic.validate()

    topic_model = topic_models.TopicModel.get(topic.id, strict=False)
    # The model is never None here: this private helper is only called with
    # topics that were loaded from the datastore.
    if topic.version > topic_model.version:
        raise Exception(
            'Unexpected error: trying to update version %s of topic '
            'from version %s. Please reload the page and try again.'
            % (topic_model.version, topic.version))
    elif topic.version < topic_model.version:
        raise Exception(
            'Trying to update version %s of topic from version %s, '
            'which is too old. Please reload the page and try again.'
            % (topic_model.version, topic.version))

    # Copy every mutable field from the domain object onto the model.
    topic_model.name = topic.name
    topic_model.description = topic.description
    topic_model.canonical_story_ids = topic.canonical_story_ids
    topic_model.additional_story_ids = topic.additional_story_ids
    topic_model.uncategorized_skill_ids = topic.uncategorized_skill_ids
    topic_model.subtopics = [
        subtopic.to_dict() for subtopic in topic.subtopics]
    topic_model.subtopic_schema_version = topic.subtopic_schema_version
    topic_model.next_subtopic_id = topic.next_subtopic_id
    topic_model.language_code = topic.language_code
    topic_model.commit(
        committer_id, commit_message,
        [change.to_dict() for change in change_list])
    # Invalidate the cached copy so the next read sees the new version.
    memcache_services.delete(_get_topic_memcache_key(topic.id))
    topic.version += 1
def update_topic_and_subtopic_pages(
        committer_id, topic_id, change_list, commit_message):
    """Updates a topic and its subtopic pages. Commits changes.

    Args:
        committer_id: str. The id of the user who is performing the update
            action.
        topic_id: str. The topic id.
        change_list: list(TopicChange and SubtopicPageChange). These changes are
            applied in sequence to produce the resulting topic.
        commit_message: str or None. A description of changes made to the
            topic.

    Raises:
        ValueError: No commit message was supplied.
    """
    if not commit_message:
        raise ValueError(
            'Expected a commit message, received none.')

    (
        updated_topic, updated_subtopic_pages_dict,
        deleted_subtopic_ids, newly_created_subtopic_ids,
        updated_subtopic_pages_change_cmds_dict
    ) = apply_change_list(topic_id, change_list)
    # The topic itself is committed first; subtopic page changes follow.
    _save_topic(
        committer_id, updated_topic, commit_message, change_list
    )
    # The following loop deletes those subtopic pages that are already in the
    # datastore, which are supposed to be deleted in the current changelist.
    for subtopic_id in deleted_subtopic_ids:
        if subtopic_id not in newly_created_subtopic_ids:
            subtopic_page_services.delete_subtopic_page(
                committer_id, topic_id, subtopic_id)

    for subtopic_page_id in updated_subtopic_pages_dict:
        subtopic_page = updated_subtopic_pages_dict[subtopic_page_id]
        subtopic_page_change_list = updated_subtopic_pages_change_cmds_dict[
            subtopic_page_id]
        subtopic_id = subtopic_page.get_subtopic_id_from_subtopic_page_id()
        # The following condition prevents the creation of subtopic pages that
        # were deleted above.
        if subtopic_id not in deleted_subtopic_ids:
            subtopic_page_services.save_subtopic_page(
                committer_id, subtopic_page, commit_message,
                subtopic_page_change_list)
    # Keep the stored summary in sync with the new topic state.
    create_topic_summary(topic_id)
def delete_uncategorized_skill(user_id, topic_id, uncategorized_skill_id):
    """Removes the given uncategorized skill from the topic.

    Args:
        user_id: str. The id of the user performing the action.
        topic_id: str. The id of the topic to remove the skill from.
        uncategorized_skill_id: str. The uncategorized skill to remove from
            the topic.
    """
    change = topic_domain.TopicChange({
        'cmd': 'remove_uncategorized_skill_id',
        'uncategorized_skill_id': uncategorized_skill_id
    })
    update_topic_and_subtopic_pages(
        user_id, topic_id, [change],
        'Removed %s from uncategorized skill ids' % uncategorized_skill_id)
def add_uncategorized_skill(user_id, topic_id, uncategorized_skill_id):
    """Adds the given skill to the topic's uncategorized skill list.

    Args:
        user_id: str. The id of the user performing the action.
        topic_id: str. The id of the topic to add the skill to.
        uncategorized_skill_id: str. The id of the uncategorized skill to
            add to the topic.
    """
    change = topic_domain.TopicChange({
        'cmd': 'add_uncategorized_skill_id',
        'new_uncategorized_skill_id': uncategorized_skill_id
    })
    update_topic_and_subtopic_pages(
        user_id, topic_id, [change],
        'Added %s to uncategorized skill ids' % uncategorized_skill_id)
def delete_story(user_id, topic_id, story_id):
    """Removes a story from the topic's canonical story list.

    NOTE TO DEVELOPERS: Presently, this function only removes story_id
    from the canonical_story_ids list.

    Args:
        user_id: str. The id of the user performing the action.
        topic_id: str. The id of the topic to remove the story from.
        story_id: str. The story to remove from the topic.
    """
    topic = get_topic_by_id(topic_id)
    old_story_ids = copy.deepcopy(topic.canonical_story_ids)
    topic.delete_story(story_id)
    change = topic_domain.TopicChange({
        'cmd': 'update_topic_property',
        'property_name': 'canonical_story_ids',
        'old_value': old_story_ids,
        'new_value': topic.canonical_story_ids
    })
    update_topic_and_subtopic_pages(
        user_id, topic_id, [change],
        'Removed %s from canonical story ids' % story_id)
def add_canonical_story(user_id, topic_id, story_id):
    """Appends a story to the canonical story id list of a topic.

    Args:
        user_id: str. The id of the user performing the action.
        topic_id: str. The id of the topic the story is added to.
        story_id: str. The story to add to the topic.
    """
    topic = get_topic_by_id(topic_id)
    old_story_ids = copy.deepcopy(topic.canonical_story_ids)
    topic.add_canonical_story(story_id)
    change = topic_domain.TopicChange({
        'cmd': 'update_topic_property',
        'property_name': 'canonical_story_ids',
        'old_value': old_story_ids,
        'new_value': topic.canonical_story_ids
    })
    update_topic_and_subtopic_pages(
        user_id, topic_id, [change],
        'Added %s to canonical story ids' % story_id)
def delete_topic(committer_id, topic_id, force_deletion=False):
    """Deletes the topic with the given topic_id.

    Args:
        committer_id: str. ID of the committer.
        topic_id: str. ID of the topic to be deleted.
        force_deletion: bool. If true, the topic and its history are fully
            deleted and are unrecoverable. Otherwise, the topic and all
            its history are marked as deleted, but the corresponding models
            are still retained in the datastore. This last option is the
            preferred one.
    """
    rights_model = topic_models.TopicRightsModel.get(topic_id)
    rights_model.delete(
        committer_id, feconf.COMMIT_MESSAGE_TOPIC_DELETED,
        force_deletion=force_deletion)
    # The summary is removed regardless of whether force_deletion is set.
    delete_topic_summary(topic_id)
    topic_model = topic_models.TopicModel.get(topic_id)
    for subtopic_dict in topic_model.subtopics:
        subtopic_page_services.delete_subtopic_page(
            committer_id, topic_id, subtopic_dict['id'])
    topic_model.delete(
        committer_id, feconf.COMMIT_MESSAGE_TOPIC_DELETED,
        force_deletion=force_deletion)
    # The cache eviction must happen after the topic model was fetched
    # above; otherwise the memcache key would be reinstated by the fetch.
    memcache_services.delete(_get_topic_memcache_key(topic_id))
def delete_topic_summary(topic_id):
    """Removes the TopicSummaryModel of the given topic from the datastore.

    Args:
        topic_id: str. ID of the topic whose summary should be removed.
    """
    summary_model = topic_models.TopicSummaryModel.get(topic_id)
    summary_model.delete()
def create_topic_summary(topic_id):
    """Computes and stores a summary for the topic with the given id.

    Args:
        topic_id: str. ID of the topic.
    """
    save_topic_summary(compute_summary_of_topic(get_topic_by_id(topic_id)))
def compute_summary_of_topic(topic):
    """Builds a TopicSummary domain object describing the given topic.

    Args:
        topic: Topic. The topic object for which the summary is to be
            computed.

    Returns:
        TopicSummary. The computed summary for the given topic.
    """
    uncategorized_skill_count = len(topic.uncategorized_skill_ids)
    # Total skill count covers uncategorized skills plus the skills of
    # every subtopic.
    total_skill_count = uncategorized_skill_count + sum(
        len(subtopic.skill_ids) for subtopic in topic.subtopics)
    return topic_domain.TopicSummary(
        topic.id, topic.name, topic.canonical_name, topic.language_code,
        topic.version, len(topic.canonical_story_ids),
        len(topic.additional_story_ids),
        uncategorized_skill_count, len(topic.subtopics),
        total_skill_count, topic.created_on, topic.last_updated
    )
def save_topic_summary(topic_summary):
    """Persists a TopicSummary domain object as a TopicSummaryModel entity.

    Args:
        topic_summary: The topic summary object to be saved in the
            datastore.
    """
    summary_fields = {
        'id': topic_summary.id,
        'name': topic_summary.name,
        'canonical_name': topic_summary.canonical_name,
        'language_code': topic_summary.language_code,
        'version': topic_summary.version,
        'additional_story_count': topic_summary.additional_story_count,
        'canonical_story_count': topic_summary.canonical_story_count,
        'uncategorized_skill_count': (
            topic_summary.uncategorized_skill_count),
        'subtopic_count': topic_summary.subtopic_count,
        'total_skill_count': topic_summary.total_skill_count,
        'topic_model_last_updated': (
            topic_summary.topic_model_last_updated),
        'topic_model_created_on': topic_summary.topic_model_created_on,
    }
    topic_models.TopicSummaryModel(**summary_fields).put()
def get_topic_rights_from_model(topic_rights_model):
    """Builds a TopicRights domain object from a topic rights model.

    Args:
        topic_rights_model: TopicRightsModel. Topic rights from the
            datastore.

    Returns:
        TopicRights. The rights object created from the model.
    """
    model = topic_rights_model
    return topic_domain.TopicRights(
        model.id, model.manager_ids, model.topic_is_published)
def publish_topic(topic_id, committer_id):
    """Marks the given topic as published.

    Args:
        topic_id: str. The id of the given topic.
        committer_id: str. ID of the committer.

    Raises:
        Exception. The given topic does not exist.
        Exception. The topic is already published.
        Exception. The user does not have enough rights to publish the topic.
    """
    topic_rights = get_topic_rights(topic_id, strict=False)
    if topic_rights is None:
        raise Exception('The given topic does not exist')
    committer = user_services.UserActionsInfo(committer_id)
    if role_services.ACTION_CHANGE_TOPIC_STATUS not in committer.actions:
        raise Exception(
            'The user does not have enough rights to publish the topic.')
    if topic_rights.topic_is_published:
        raise Exception('The topic is already published.')
    topic_rights.topic_is_published = True
    save_topic_rights(
        topic_rights, committer_id, 'Published the topic',
        [topic_domain.TopicRightsChange({
            'cmd': topic_domain.CMD_PUBLISH_TOPIC
        })])
def unpublish_topic(topic_id, committer_id):
    """Marks the given topic as unpublished.

    Args:
        topic_id: str. The id of the given topic.
        committer_id: str. ID of the committer.

    Raises:
        Exception. The given topic does not exist.
        Exception. The topic is already unpublished.
        Exception. The user does not have enough rights to unpublish the
            topic.
    """
    topic_rights = get_topic_rights(topic_id, strict=False)
    if topic_rights is None:
        raise Exception('The given topic does not exist')
    committer = user_services.UserActionsInfo(committer_id)
    if role_services.ACTION_CHANGE_TOPIC_STATUS not in committer.actions:
        raise Exception(
            'The user does not have enough rights to unpublish the topic.')
    if not topic_rights.topic_is_published:
        raise Exception('The topic is already unpublished.')
    topic_rights.topic_is_published = False
    save_topic_rights(
        topic_rights, committer_id, 'Unpublished the topic',
        [topic_domain.TopicRightsChange({
            'cmd': topic_domain.CMD_UNPUBLISH_TOPIC
        })])
def save_topic_rights(topic_rights, committer_id, commit_message, commit_cmds):
    """Saves a TopicRights domain object to the datastore.

    Args:
        topic_rights: TopicRights. The rights object for the given topic.
        committer_id: str. ID of the committer.
        commit_message: str. Descriptive message for the commit.
        commit_cmds: list(TopicRightsChange). A list of commands describing
            what kind of commit was done.
    """
    rights_model = topic_models.TopicRightsModel.get(
        topic_rights.id, strict=False)
    rights_model.manager_ids = topic_rights.manager_ids
    rights_model.topic_is_published = topic_rights.topic_is_published
    rights_model.commit(
        committer_id, commit_message,
        [cmd.to_dict() for cmd in commit_cmds])
def create_new_topic_rights(topic_id, committer_id):
    """Creates a default rights object for a new topic and persists it.

    Args:
        topic_id: str. ID of the topic.
        committer_id: str. ID of the committer.
    """
    rights = topic_domain.TopicRights(topic_id, [], False)
    rights_model = topic_models.TopicRightsModel(
        id=rights.id,
        manager_ids=rights.manager_ids,
        topic_is_published=rights.topic_is_published)
    rights_model.commit(
        committer_id, 'Created new topic rights',
        [{'cmd': topic_domain.CMD_CREATE_NEW}])
def get_topic_rights(topic_id, strict=True):
    """Retrieves the rights object for the given topic.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no topic with a given id
            exists in the datastore.

    Returns:
        TopicRights. The rights object associated with the given topic, or
        None if no such model exists and strict is False.

    Raises:
        EntityNotFoundError. The topic with ID topic_id was not
            found in the datastore.
    """
    rights_model = topic_models.TopicRightsModel.get(topic_id, strict=strict)
    return (
        get_topic_rights_from_model(rights_model)
        if rights_model is not None else None)
def get_topic_rights_with_user(user_id):
    """Retrieves the rights objects for all topics assigned to given user.

    Args:
        user_id: str. ID of the user.

    Returns:
        list(TopicRights). The rights objects associated with the topics
        assigned to given user.
    """
    rights_list = []
    for rights_model in topic_models.TopicRightsModel.get_by_user(user_id):
        if rights_model is not None:
            rights_list.append(get_topic_rights_from_model(rights_model))
    return rights_list
def get_all_topic_rights():
    """Returns the rights objects of all topics present in the datastore.

    Returns:
        dict. The dict of rights objects of all topics present in
        the datastore keyed by topic id.
    """
    return {
        rights.id: rights
        for rights in (
            get_topic_rights_from_model(rights_model)
            for rights_model in topic_models.TopicRightsModel.get_all())
    }
def check_can_edit_topic(user, topic_rights):
    """Checks whether the user can edit the given topic.

    Args:
        user: UserActionsInfo. Object having user_id, role and actions for
            given user.
        topic_rights: TopicRights or None. Rights object for the given topic.

    Returns:
        bool. Whether the given user can edit the given topic.
    """
    if topic_rights is None:
        return False
    # Editing is allowed for users who may edit any topic, or users who may
    # edit topics they own and who manage this particular topic.
    return (
        role_services.ACTION_EDIT_ANY_TOPIC in user.actions or (
            role_services.ACTION_EDIT_OWNED_TOPIC in user.actions and
            topic_rights.is_manager(user.user_id)))
def deassign_user_from_all_topics(committer, user_id):
    """Removes the given user as manager from every topic assigned to them.

    NOTE(review): the original docstring claimed a rights check on the
    committer, but no such check is performed here — confirm whether the
    caller is expected to enforce it.

    Args:
        committer: UserActionsInfo. UserActionsInfo object for the user
            who is performing the action.
        user_id: str. The ID of the user.
    """
    for topic_rights in get_topic_rights_with_user(user_id):
        topic_rights.manager_ids.remove(user_id)
        commit_cmds = [topic_domain.TopicRightsChange({
            'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
            'removed_user_id': user_id
        })]
        save_topic_rights(
            topic_rights, committer.user_id,
            'Removed all assigned topics from %s' % (user_id), commit_cmds)
def assign_role(committer, assignee, new_role, topic_id):
    """Assigns a new role to the user.

    Args:
        committer: UserActionsInfo. UserActionsInfo object for the user
            who is performing the action.
        assignee: UserActionsInfo. UserActionsInfo object for the user
            whose role is being changed.
        new_role: str. The name of the new role. Possible values are:
            ROLE_MANAGER
        topic_id: str. ID of the topic.

    Raises:
        Exception. The committer does not have rights to modify a role.
        Exception. The assignee is already a manager for the topic.
        Exception. The assignee doesn't have enough rights to become a manager.
        Exception. The role is invalid.
    """
    committer_id = committer.user_id
    topic_rights = get_topic_rights(topic_id)
    # Only users who may modify roles for any activity can assign topic
    # roles; refusals are logged before raising.
    if (role_services.ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY not in
            committer.actions):
        logging.error(
            'User %s tried to allow user %s to be a %s of topic %s '
            'but was refused permission.' % (
                committer_id, assignee.user_id, new_role, topic_id))
        raise Exception(
            'UnauthorizedUserException: Could not assign new role.')
    assignee_username = user_services.get_username(assignee.user_id)
    # A user can only become a manager if they are allowed to edit topics
    # they own.
    if role_services.ACTION_EDIT_OWNED_TOPIC not in assignee.actions:
        raise Exception(
            'The assignee doesn\'t have enough rights to become a manager.')
    # Record the assignee's previous role for the commit log entry below.
    old_role = topic_domain.ROLE_NONE
    if topic_rights.is_manager(assignee.user_id):
        old_role = topic_domain.ROLE_MANAGER
    if new_role == topic_domain.ROLE_MANAGER:
        if topic_rights.is_manager(assignee.user_id):
            raise Exception('This user already is a manager for this topic')
        topic_rights.manager_ids.append(assignee.user_id)
    elif new_role == topic_domain.ROLE_NONE:
        if topic_rights.is_manager(assignee.user_id):
            topic_rights.manager_ids.remove(assignee.user_id)
        else:
            # No-op demotion: the assignee was not a manager to begin with.
            old_role = topic_domain.ROLE_NONE
    else:
        raise Exception('Invalid role: %s' % new_role)
    commit_message = 'Changed role of %s from %s to %s' % (
        assignee_username, old_role, new_role)
    commit_cmds = [topic_domain.TopicRightsChange({
        'cmd': topic_domain.CMD_CHANGE_ROLE,
        'assignee_id': assignee.user_id,
        'old_role': old_role,
        'new_role': new_role
    })]
    save_topic_rights(topic_rights, committer_id, commit_message, commit_cmds)
# NOTE: dataset-extraction metadata (avg_line_length 38.347328,
# max_line_length 114, alphanum_fraction 0.685105) — not part of the module.
import collections
import copy
import logging
from core.domain import role_services
from core.domain import subtopic_page_domain
from core.domain import subtopic_page_services
from core.domain import topic_domain
from core.domain import user_services
from core.platform import models
import feconf
(topic_models,) = models.Registry.import_models([models.NAMES.topic])
datastore_services = models.Registry.import_datastore_services()
memcache_services = models.Registry.import_memcache_services()
def _migrate_subtopics_to_latest_schema(versioned_subtopics):
    """Migrates a versioned subtopics dict to the latest schema version.

    Args:
        versioned_subtopics: dict. A dict with keys 'schema_version' and
            'subtopics'. Presumably updated in place by
            Topic.update_subtopics_from_model — confirm in topic_domain.

    Raises:
        Exception. The schema version is outside the supported range
            (1 to feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION).
    """
    subtopic_schema_version = versioned_subtopics['schema_version']
    if not (1 <= subtopic_schema_version
            <= feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION):
        raise Exception(
            'Sorry, we can only process v1-v%d subtopic schemas at '
            'present.' % feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION)
    # Apply one schema-version upgrade per iteration until current.
    while (subtopic_schema_version <
           feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION):
        topic_domain.Topic.update_subtopics_from_model(
            versioned_subtopics, subtopic_schema_version)
        subtopic_schema_version += 1
def _get_topic_memcache_key(topic_id, version=None):
if version:
return 'topic-version:%s:%s' % (topic_id, version)
else:
return 'topic:%s' % topic_id
def get_topic_from_model(topic_model):
    """Builds a Topic domain object from a TopicModel, migrating the
    subtopics to the latest schema version first when needed.

    Args:
        topic_model: TopicModel. The topic model loaded from the datastore.

    Returns:
        Topic. The corresponding domain object.
    """
    versioned_subtopics = {
        'schema_version': topic_model.subtopic_schema_version,
        'subtopics': copy.deepcopy(topic_model.subtopics)
    }
    # Upgrade stale subtopic dicts before constructing the domain object.
    if (topic_model.subtopic_schema_version !=
            feconf.CURRENT_SUBTOPIC_SCHEMA_VERSION):
        _migrate_subtopics_to_latest_schema(versioned_subtopics)
    return topic_domain.Topic(
        topic_model.id, topic_model.name,
        topic_model.description, topic_model.canonical_story_ids,
        topic_model.additional_story_ids, topic_model.uncategorized_skill_ids,
        [
            topic_domain.Subtopic.from_dict(subtopic)
            for subtopic in versioned_subtopics['subtopics']
        ],
        versioned_subtopics['schema_version'],
        topic_model.next_subtopic_id,
        topic_model.language_code,
        topic_model.version, topic_model.created_on,
        topic_model.last_updated)
def get_all_topic_summaries():
    """Returns TopicSummary domain objects for every summary model.

    Returns:
        list(TopicSummary). Summaries of all topics in the datastore.
    """
    return [
        get_topic_summary_from_model(summary_model)
        for summary_model in topic_models.TopicSummaryModel.get_all()]
def get_all_skill_ids_assigned_to_some_topic():
    """Returns the ids of all skills that are linked to at least one topic.

    Returns:
        set(str). The ids of all skills assigned to some topic.
    """
    # set() instead of set([]); also stream topics one at a time rather
    # than materializing the full list of domain objects first.
    skill_ids = set()
    for topic_model in topic_models.TopicModel.get_all():
        skill_ids.update(get_topic_from_model(topic_model).get_all_skill_ids())
    return skill_ids
def get_topic_summary_from_model(topic_summary_model):
    """Builds a TopicSummary domain object from a TopicSummaryModel.

    Args:
        topic_summary_model: TopicSummaryModel. The model to convert.

    Returns:
        TopicSummary. The corresponding domain object.
    """
    return topic_domain.TopicSummary(
        topic_summary_model.id, topic_summary_model.name,
        topic_summary_model.canonical_name,
        topic_summary_model.language_code,
        topic_summary_model.version,
        topic_summary_model.canonical_story_count,
        topic_summary_model.additional_story_count,
        topic_summary_model.uncategorized_skill_count,
        topic_summary_model.subtopic_count,
        topic_summary_model.total_skill_count,
        topic_summary_model.topic_model_created_on,
        topic_summary_model.topic_model_last_updated
    )
def get_topic_by_id(topic_id, strict=True, version=None):
    """Returns a Topic domain object, served from memcache when possible.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no topic with the given id
            exists in the datastore.
        version: int or None. The version of the topic to retrieve, or None
            for the latest version.

    Returns:
        Topic or None. The topic, or None if it does not exist and strict
        is False.
    """
    topic_memcache_key = _get_topic_memcache_key(topic_id, version=version)
    memcached_topic = memcache_services.get_multi(
        [topic_memcache_key]).get(topic_memcache_key)
    if memcached_topic is not None:
        return memcached_topic
    else:
        topic_model = topic_models.TopicModel.get(
            topic_id, strict=strict, version=version)
        if topic_model:
            topic = get_topic_from_model(topic_model)
            # Populate the cache so later reads skip the datastore.
            memcache_services.set_multi({topic_memcache_key: topic})
            return topic
        else:
            return None
def get_topics_by_ids(topic_ids):
    """Returns Topic domain objects matching the given ids.

    Args:
        topic_ids: list(str). The ids of the topics to fetch.

    Returns:
        list(Topic|None). One entry per requested id; None where no topic
        with that id exists.
    """
    return [
        None if model is None else get_topic_from_model(model)
        for model in topic_models.TopicModel.get_multi(topic_ids)]
def get_topic_by_name(topic_name):
    """Returns the Topic with the given name, or None if none exists.

    Args:
        topic_name: str. The name of the topic.

    Returns:
        Topic or None. The matching topic, if any.
    """
    model = topic_models.TopicModel.get_by_name(topic_name)
    return get_topic_from_model(model) if model is not None else None
def get_topic_summary_by_id(topic_id, strict=True):
    """Returns the TopicSummary for the given topic, or None if absent.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no summary model with the
            given id exists in the datastore.

    Returns:
        TopicSummary or None. The summary, if one exists.
    """
    summary_model = topic_models.TopicSummaryModel.get(
        topic_id, strict=strict)
    if not summary_model:
        return None
    return get_topic_summary_from_model(summary_model)
def get_new_topic_id():
    """Returns a new, unused topic id.

    Returns:
        str. A fresh id for a topic.
    """
    return topic_models.TopicModel.get_new_id('')
def _create_topic(committer_id, topic, commit_message, commit_cmds):
    """Creates a new topic model (plus its rights and summary models).

    Args:
        committer_id: str. ID of the committer.
        topic: Topic. The topic domain object to persist.
        commit_message: str. Descriptive message for the commit.
        commit_cmds: list(TopicChange). Commands describing the creation.
    """
    topic.validate()
    create_new_topic_rights(topic.id, committer_id)
    model = topic_models.TopicModel(
        id=topic.id,
        name=topic.name,
        canonical_name=topic.canonical_name,
        description=topic.description,
        language_code=topic.language_code,
        canonical_story_ids=topic.canonical_story_ids,
        additional_story_ids=topic.additional_story_ids,
        uncategorized_skill_ids=topic.uncategorized_skill_ids,
        subtopic_schema_version=topic.subtopic_schema_version,
        next_subtopic_id=topic.next_subtopic_id,
        subtopics=[subtopic.to_dict() for subtopic in topic.subtopics]
    )
    commit_cmd_dicts = [commit_cmd.to_dict() for commit_cmd in commit_cmds]
    model.commit(committer_id, commit_message, commit_cmd_dicts)
    # Keep the in-memory domain object in sync with the committed model.
    topic.version += 1
    create_topic_summary(topic.id)
def save_new_topic(committer_id, topic):
    """Saves a newly constructed topic, enforcing name uniqueness.

    Args:
        committer_id: str. ID of the committer.
        topic: Topic. The topic domain object to save.

    Raises:
        Exception. A topic with the same name already exists.
    """
    existing_topic = get_topic_by_name(topic.name)
    if existing_topic is not None:
        raise Exception('Topic with name \'%s\' already exists' % topic.name)
    commit_message = (
        'New topic created with name \'%s\'.' % topic.name)
    _create_topic(
        committer_id, topic, commit_message, [topic_domain.TopicChange({
            'cmd': topic_domain.CMD_CREATE_NEW,
            'name': topic.name
        })])
def apply_change_list(topic_id, change_list):
    """Applies a changelist to a topic (in memory) and returns the result.

    Nothing is persisted here; callers are expected to save the returned
    topic and subtopic pages themselves.

    Args:
        topic_id: str. ID of the topic.
        change_list: list(TopicChange|SubtopicPageChange). The changes to
            apply.

    Returns:
        tuple of (Topic, dict, list(int), list(int), dict): the updated
        topic, the modified subtopic pages keyed by page id, the ids of
        deleted subtopics, the ids of newly created subtopics, and the
        subtopic page change commands keyed by page id.
    """
    topic = get_topic_by_id(topic_id)
    newly_created_subtopic_ids = []
    existing_subtopic_page_ids_to_be_modified = []
    deleted_subtopic_ids = []
    modified_subtopic_pages_list = []
    modified_subtopic_pages = {}
    modified_subtopic_change_cmds = collections.defaultdict(list)
    # First pass: collect the already-existing subtopic pages that this
    # changelist modifies, so they can be batch-fetched below.
    for change in change_list:
        if (change.cmd ==
                subtopic_page_domain.CMD_UPDATE_SUBTOPIC_PAGE_PROPERTY):
            if change.subtopic_id < topic.next_subtopic_id:
                existing_subtopic_page_ids_to_be_modified.append(
                    change.subtopic_id)
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                modified_subtopic_change_cmds[subtopic_page_id].append(
                    change)
    modified_subtopic_pages_list = (
        subtopic_page_services.get_subtopic_pages_with_ids(
            topic_id, existing_subtopic_page_ids_to_be_modified))
    for subtopic_page in modified_subtopic_pages_list:
        modified_subtopic_pages[subtopic_page.id] = subtopic_page
    try:
        # Second pass: apply every change to the in-memory domain objects.
        for change in change_list:
            if change.cmd == topic_domain.CMD_ADD_SUBTOPIC:
                topic.add_subtopic(change.subtopic_id, change.title)
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                modified_subtopic_pages[subtopic_page_id] = (
                    subtopic_page_domain.SubtopicPage.create_default_subtopic_page(
                        change.subtopic_id, topic_id)
                )
                modified_subtopic_change_cmds[subtopic_page_id].append(
                    subtopic_page_domain.SubtopicPageChange({
                        'cmd': 'create_new',
                        'topic_id': topic_id,
                        'subtopic_id': change.subtopic_id
                    }))
                newly_created_subtopic_ids.append(change.subtopic_id)
            elif change.cmd == topic_domain.CMD_DELETE_SUBTOPIC:
                topic.delete_subtopic(change.subtopic_id)
                if change.subtopic_id in newly_created_subtopic_ids:
                    raise Exception(
                        'The incoming changelist had simultaneous'
                        ' creation and deletion of subtopics.')
                deleted_subtopic_ids.append(change.subtopic_id)
            elif change.cmd == topic_domain.CMD_ADD_UNCATEGORIZED_SKILL_ID:
                topic.add_uncategorized_skill_id(
                    change.new_uncategorized_skill_id)
            elif change.cmd == topic_domain.CMD_REMOVE_UNCATEGORIZED_SKILL_ID:
                topic.remove_uncategorized_skill_id(
                    change.uncategorized_skill_id)
            elif change.cmd == topic_domain.CMD_MOVE_SKILL_ID_TO_SUBTOPIC:
                topic.move_skill_id_to_subtopic(
                    change.old_subtopic_id, change.new_subtopic_id,
                    change.skill_id)
            elif change.cmd == topic_domain.CMD_REMOVE_SKILL_ID_FROM_SUBTOPIC:
                topic.remove_skill_id_from_subtopic(
                    change.subtopic_id, change.skill_id)
            elif change.cmd == topic_domain.CMD_UPDATE_TOPIC_PROPERTY:
                if (change.property_name ==
                        topic_domain.TOPIC_PROPERTY_NAME):
                    topic.update_name(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_DESCRIPTION):
                    topic.update_description(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_CANONICAL_STORY_IDS):
                    topic.update_canonical_story_ids(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_ADDITIONAL_STORY_IDS):
                    topic.update_additional_story_ids(change.new_value)
                elif (change.property_name ==
                      topic_domain.TOPIC_PROPERTY_LANGUAGE_CODE):
                    topic.update_language_code(change.new_value)
            elif (change.cmd ==
                  subtopic_page_domain.CMD_UPDATE_SUBTOPIC_PAGE_PROPERTY):
                subtopic_page_id = (
                    subtopic_page_domain.SubtopicPage.get_subtopic_page_id(
                        topic_id, change.subtopic_id))
                if ((modified_subtopic_pages[subtopic_page_id] is None) or
                        (change.subtopic_id in deleted_subtopic_ids)):
                    raise Exception(
                        'The subtopic with id %s doesn\'t exist' % (
                            change.subtopic_id))
                if (change.property_name ==
                        subtopic_page_domain.
                        SUBTOPIC_PAGE_PROPERTY_PAGE_CONTENTS_HTML):
                    modified_subtopic_pages[
                        subtopic_page_id].update_page_contents_html(
                            change.new_value)
                elif (change.property_name ==
                      subtopic_page_domain.
                      SUBTOPIC_PAGE_PROPERTY_PAGE_CONTENTS_AUDIO):
                    modified_subtopic_pages[
                        subtopic_page_id].update_page_contents_audio(
                            change.new_value)
            elif change.cmd == topic_domain.CMD_UPDATE_SUBTOPIC_PROPERTY:
                if (change.property_name ==
                        topic_domain.SUBTOPIC_PROPERTY_TITLE):
                    topic.update_subtopic_title(
                        change.subtopic_id, change.new_value)
            elif (
                    change.cmd ==
                    topic_domain.CMD_MIGRATE_SUBTOPIC_SCHEMA_TO_LATEST_VERSION):
                # Loading the topic model from the datastore into a
                # Topic domain object automatically converts it to use the
                # latest schema version. As a result, simply resaving the
                # topic is sufficient to apply the schema migration.
                continue
        return (
            topic, modified_subtopic_pages, deleted_subtopic_ids,
            newly_created_subtopic_ids, modified_subtopic_change_cmds)
    except Exception as e:
        logging.error(
            '%s %s %s %s' % (
                e.__class__.__name__, e, topic_id, change_list)
        )
        raise
def _save_topic(committer_id, topic, commit_message, change_list):
    """Commits an updated Topic domain object back to its datastore model.

    Args:
        committer_id: str. ID of the committer.
        topic: Topic. The updated topic domain object.
        commit_message: str. Descriptive message for the commit.
        change_list: list(TopicChange). The changes being committed.

    Raises:
        Exception. The change list is empty, or the domain object's version
            does not match the stored model's version.
    """
    if not change_list:
        raise Exception(
            'Unexpected error: received an invalid change list when trying to '
            'save topic %s: %s' % (topic.id, change_list))
    topic.validate()
    topic_model = topic_models.TopicModel.get(topic.id, strict=False)
    # Topic model cannot be None as topic is passed as parameter here and that
    # is only possible if a topic model with that topic id exists. Also this is
    # a private function and so it cannot be called independently with any
    # topic object.
    if topic.version > topic_model.version:
        raise Exception(
            'Unexpected error: trying to update version %s of topic '
            'from version %s. Please reload the page and try again.'
            % (topic_model.version, topic.version))
    elif topic.version < topic_model.version:
        raise Exception(
            'Trying to update version %s of topic from version %s, '
            'which is too old. Please reload the page and try again.'
            % (topic_model.version, topic.version))
    topic_model.description = topic.description
    topic_model.name = topic.name
    topic_model.canonical_story_ids = topic.canonical_story_ids
    topic_model.additional_story_ids = topic.additional_story_ids
    topic_model.uncategorized_skill_ids = topic.uncategorized_skill_ids
    topic_model.subtopics = [subtopic.to_dict() for subtopic in topic.subtopics]
    topic_model.subtopic_schema_version = topic.subtopic_schema_version
    topic_model.next_subtopic_id = topic.next_subtopic_id
    topic_model.language_code = topic.language_code
    change_dicts = [change.to_dict() for change in change_list]
    topic_model.commit(committer_id, commit_message, change_dicts)
    # Evict the cached copy so subsequent reads see the committed version.
    memcache_services.delete(_get_topic_memcache_key(topic.id))
    topic.version += 1
def update_topic_and_subtopic_pages(
        committer_id, topic_id, change_list, commit_message):
    """Applies a changelist to a topic and persists all affected models.

    Args:
        committer_id: str. ID of the committer.
        topic_id: str. ID of the topic.
        change_list: list(TopicChange|SubtopicPageChange). The changes to
            apply and commit.
        commit_message: str. Descriptive message for the commit.

    Raises:
        ValueError. The commit message is empty.
    """
    if not commit_message:
        raise ValueError(
            'Expected a commit message, received none.')
    (
        updated_topic, updated_subtopic_pages_dict,
        deleted_subtopic_ids, newly_created_subtopic_ids,
        updated_subtopic_pages_change_cmds_dict
    ) = apply_change_list(topic_id, change_list)
    _save_topic(
        committer_id, updated_topic, commit_message, change_list
    )
    # The following loop deletes those subtopic pages that are already in the
    # datastore, which are supposed to be deleted in the current changelist.
    for subtopic_id in deleted_subtopic_ids:
        if subtopic_id not in newly_created_subtopic_ids:
            subtopic_page_services.delete_subtopic_page(
                committer_id, topic_id, subtopic_id)
    for subtopic_page_id in updated_subtopic_pages_dict:
        subtopic_page = updated_subtopic_pages_dict[subtopic_page_id]
        subtopic_page_change_list = updated_subtopic_pages_change_cmds_dict[
            subtopic_page_id]
        subtopic_id = subtopic_page.get_subtopic_id_from_subtopic_page_id()
        # The following condition prevents the creation of subtopic pages that
        # were deleted above.
        if subtopic_id not in deleted_subtopic_ids:
            subtopic_page_services.save_subtopic_page(
                committer_id, subtopic_page, commit_message,
                subtopic_page_change_list)
    create_topic_summary(topic_id)
def delete_uncategorized_skill(user_id, topic_id, uncategorized_skill_id):
    """Removes a skill from a topic's uncategorized skill id list.

    Args:
        user_id: str. The id of the user who is performing the action.
        topic_id: str. The id of the topic.
        uncategorized_skill_id: str. The uncategorized skill to remove.
    """
    change_list = [topic_domain.TopicChange({
        'cmd': 'remove_uncategorized_skill_id',
        'uncategorized_skill_id': uncategorized_skill_id
    })]
    update_topic_and_subtopic_pages(
        user_id, topic_id, change_list,
        'Removed %s from uncategorized skill ids' % uncategorized_skill_id)
def add_uncategorized_skill(user_id, topic_id, uncategorized_skill_id):
    """Adds a skill to a topic's uncategorized skill id list.

    Args:
        user_id: str. The id of the user who is performing the action.
        topic_id: str. The id of the topic.
        uncategorized_skill_id: str. The uncategorized skill to add.
    """
    change_list = [topic_domain.TopicChange({
        'cmd': 'add_uncategorized_skill_id',
        'new_uncategorized_skill_id': uncategorized_skill_id
    })]
    update_topic_and_subtopic_pages(
        user_id, topic_id, change_list,
        'Added %s to uncategorized skill ids' % uncategorized_skill_id)
def delete_story(user_id, topic_id, story_id):
    """Removes a story from a topic's canonical story id list.

    Args:
        user_id: str. The id of the user who is performing the action.
        topic_id: str. The id of the topic from which to remove the story.
        story_id: str. The story to remove from the topic.
    """
    topic = get_topic_by_id(topic_id)
    old_canonical_story_ids = copy.deepcopy(topic.canonical_story_ids)
    topic.delete_story(story_id)
    change_list = [topic_domain.TopicChange({
        'cmd': 'update_topic_property',
        'property_name': 'canonical_story_ids',
        'old_value': old_canonical_story_ids,
        'new_value': topic.canonical_story_ids
    })]
    update_topic_and_subtopic_pages(
        user_id, topic_id, change_list,
        'Removed %s from canonical story ids' % story_id)
def add_canonical_story(user_id, topic_id, story_id):
    """Adds a story to a topic's canonical story id list.

    Args:
        user_id: str. The id of the user who is performing the action.
        topic_id: str. The id of the topic to which the story is to be added.
        story_id: str. The story to add to the topic.
    """
    topic = get_topic_by_id(topic_id)
    old_canonical_story_ids = copy.deepcopy(topic.canonical_story_ids)
    topic.add_canonical_story(story_id)
    change_list = [topic_domain.TopicChange({
        'cmd': 'update_topic_property',
        'property_name': 'canonical_story_ids',
        'old_value': old_canonical_story_ids,
        'new_value': topic.canonical_story_ids
    })]
    update_topic_and_subtopic_pages(
        user_id, topic_id, change_list,
        'Added %s to canonical story ids' % story_id)
def delete_topic(committer_id, topic_id, force_deletion=False):
    """Deletes the topic with the given id, with its rights, summary and
    subtopic pages.

    Args:
        committer_id: str. ID of the committer.
        topic_id: str. ID of the topic to be deleted.
        force_deletion: bool. If true, the topic and its history are fully
            deleted and unrecoverable; otherwise models are only marked as
            deleted.
    """
    topic_rights_model = topic_models.TopicRightsModel.get(topic_id)
    topic_rights_model.delete(
        committer_id, feconf.COMMIT_MESSAGE_TOPIC_DELETED,
        force_deletion=force_deletion)
    # Delete the summary of the topic (regardless of whether
    # force_deletion is True or not).
    delete_topic_summary(topic_id)
    topic_model = topic_models.TopicModel.get(topic_id)
    for subtopic in topic_model.subtopics:
        subtopic_page_services.delete_subtopic_page(
            committer_id, topic_id, subtopic['id'])
    topic_model.delete(
        committer_id, feconf.COMMIT_MESSAGE_TOPIC_DELETED,
        force_deletion=force_deletion)
    # This must come after the topic is retrieved. Otherwise the memcache
    # key will be reinstated.
    topic_memcache_key = _get_topic_memcache_key(topic_id)
    memcache_services.delete(topic_memcache_key)
def delete_topic_summary(topic_id):
    """Deletes the TopicSummaryModel of the topic with the given id.

    Args:
        topic_id: str. ID of the topic whose summary is to be deleted.
    """
    topic_models.TopicSummaryModel.get(topic_id).delete()
def create_topic_summary(topic_id):
    """Creates and stores a summary of the given topic.

    Args:
        topic_id: str. ID of the topic.
    """
    topic = get_topic_by_id(topic_id)
    topic_summary = compute_summary_of_topic(topic)
    save_topic_summary(topic_summary)
def compute_summary_of_topic(topic):
    """Creates a TopicSummary domain object for the given Topic.

    Args:
        topic: Topic. The topic object for which the summary is to be
            computed.

    Returns:
        TopicSummary. The computed summary for the given topic.
    """
    topic_model_canonical_story_count = len(topic.canonical_story_ids)
    topic_model_additional_story_count = len(topic.additional_story_ids)
    topic_model_uncategorized_skill_count = len(topic.uncategorized_skill_ids)
    topic_model_subtopic_count = len(topic.subtopics)
    # Total skill count = uncategorized skills + skills in all subtopics.
    total_skill_count = topic_model_uncategorized_skill_count
    for subtopic in topic.subtopics:
        total_skill_count = total_skill_count + len(subtopic.skill_ids)
    topic_summary = topic_domain.TopicSummary(
        topic.id, topic.name, topic.canonical_name, topic.language_code,
        topic.version, topic_model_canonical_story_count,
        topic_model_additional_story_count,
        topic_model_uncategorized_skill_count, topic_model_subtopic_count,
        total_skill_count, topic.created_on, topic.last_updated
    )
    return topic_summary
def save_topic_summary(topic_summary):
    """Saves a TopicSummary domain object as a TopicSummaryModel entity.

    Args:
        topic_summary: TopicSummary. The summary object to be saved.
    """
    topic_summary_model = topic_models.TopicSummaryModel(
        id=topic_summary.id,
        name=topic_summary.name,
        canonical_name=topic_summary.canonical_name,
        language_code=topic_summary.language_code,
        version=topic_summary.version,
        additional_story_count=topic_summary.additional_story_count,
        canonical_story_count=topic_summary.canonical_story_count,
        uncategorized_skill_count=topic_summary.uncategorized_skill_count,
        subtopic_count=topic_summary.subtopic_count,
        total_skill_count=topic_summary.total_skill_count,
        topic_model_last_updated=topic_summary.topic_model_last_updated,
        topic_model_created_on=topic_summary.topic_model_created_on
    )
    topic_summary_model.put()
def get_topic_rights_from_model(topic_rights_model):
    """Constructs a TopicRights object from the given topic rights model.

    Args:
        topic_rights_model: TopicRightsModel. Topic rights from the
            datastore.

    Returns:
        TopicRights. The rights object created from the model.
    """
    return topic_domain.TopicRights(
        topic_rights_model.id,
        topic_rights_model.manager_ids,
        topic_rights_model.topic_is_published
    )
def publish_topic(topic_id, committer_id):
    """Marks the given topic as published.

    Args:
        topic_id: str. The id of the given topic.
        committer_id: str. ID of the committer.

    Raises:
        Exception. The given topic does not exist.
        Exception. The topic is already published.
        Exception. The user does not have enough rights to publish the topic.
    """
    topic_rights = get_topic_rights(topic_id, strict=False)
    if topic_rights is None:
        raise Exception('The given topic does not exist')
    user = user_services.UserActionsInfo(committer_id)
    if role_services.ACTION_CHANGE_TOPIC_STATUS not in user.actions:
        raise Exception(
            'The user does not have enough rights to publish the topic.')
    if topic_rights.topic_is_published:
        raise Exception('The topic is already published.')
    topic_rights.topic_is_published = True
    commit_cmds = [topic_domain.TopicRightsChange({
        'cmd': topic_domain.CMD_PUBLISH_TOPIC
    })]
    save_topic_rights(
        topic_rights, committer_id, 'Published the topic', commit_cmds)
def unpublish_topic(topic_id, committer_id):
    """Marks the given topic as unpublished.

    Args:
        topic_id: str. The id of the given topic.
        committer_id: str. ID of the committer.

    Raises:
        Exception. The given topic does not exist.
        Exception. The topic is already unpublished.
        Exception. The user does not have enough rights to unpublish the
            topic.
    """
    topic_rights = get_topic_rights(topic_id, strict=False)
    if topic_rights is None:
        raise Exception('The given topic does not exist')
    user = user_services.UserActionsInfo(committer_id)
    if role_services.ACTION_CHANGE_TOPIC_STATUS not in user.actions:
        raise Exception(
            'The user does not have enough rights to unpublish the topic.')
    if not topic_rights.topic_is_published:
        raise Exception('The topic is already unpublished.')
    topic_rights.topic_is_published = False
    commit_cmds = [topic_domain.TopicRightsChange({
        'cmd': topic_domain.CMD_UNPUBLISH_TOPIC
    })]
    save_topic_rights(
        topic_rights, committer_id, 'Unpublished the topic', commit_cmds)
def save_topic_rights(topic_rights, committer_id, commit_message, commit_cmds):
    """Saves a TopicRights domain object to the datastore.

    Args:
        topic_rights: TopicRights. The rights object for the given topic.
        committer_id: str. ID of the committer.
        commit_message: str. Descriptive message for the commit.
        commit_cmds: list(TopicRightsChange). Commands describing the
            commit.
    """
    model = topic_models.TopicRightsModel.get(topic_rights.id, strict=False)
    model.manager_ids = topic_rights.manager_ids
    model.topic_is_published = topic_rights.topic_is_published
    commit_cmd_dicts = [commit_cmd.to_dict() for commit_cmd in commit_cmds]
    model.commit(committer_id, commit_message, commit_cmd_dicts)
def create_new_topic_rights(topic_id, committer_id):
    """Creates a new topic rights object and saves it to the datastore.

    Args:
        topic_id: str. ID of the topic.
        committer_id: str. ID of the committer.
    """
    topic_rights = topic_domain.TopicRights(topic_id, [], False)
    commit_cmds = [{'cmd': topic_domain.CMD_CREATE_NEW}]
    topic_models.TopicRightsModel(
        id=topic_rights.id,
        manager_ids=topic_rights.manager_ids,
        topic_is_published=topic_rights.topic_is_published
    ).commit(committer_id, 'Created new topic rights', commit_cmds)
def get_topic_rights(topic_id, strict=True):
    """Retrieves the rights object for the given topic.

    Args:
        topic_id: str. ID of the topic.
        strict: bool. Whether to fail noisily if no topic with a given id
            exists in the datastore.

    Returns:
        TopicRights or None. The rights object for the topic, or None if
        no model exists and strict is False.
    """
    model = topic_models.TopicRightsModel.get(topic_id, strict=strict)
    if model is None:
        return None
    return get_topic_rights_from_model(model)
def get_topic_rights_with_user(user_id):
    """Retrieves the rights objects for all topics assigned to given user.

    Args:
        user_id: str. ID of the user.

    Returns:
        list(TopicRights). The rights objects associated with the topics
        assigned to given user.
    """
    topic_rights_models = topic_models.TopicRightsModel.get_by_user(user_id)
    return [
        get_topic_rights_from_model(model) for model in topic_rights_models
        if model is not None]
def get_all_topic_rights():
    """Return a dict mapping each topic id to its rights domain object."""
    return {
        rights.id: rights for rights in (
            get_topic_rights_from_model(model)
            for model in topic_models.TopicRightsModel.get_all())}
def check_can_edit_topic(user, topic_rights):
    """Whether the given user may edit the topic with the given rights.

    Editing is allowed for users holding the edit-any-topic action, or for
    managers of this topic holding the edit-owned-topic action.
    """
    if topic_rights is None:
        return False
    if role_services.ACTION_EDIT_ANY_TOPIC in user.actions:
        return True
    if role_services.ACTION_EDIT_OWNED_TOPIC not in user.actions:
        return False
    return True if topic_rights.is_manager(user.user_id) else False
def deassign_user_from_all_topics(committer, user_id):
    """Strip the user's manager role from every topic they currently manage."""
    for rights in get_topic_rights_with_user(user_id):
        rights.manager_ids.remove(user_id)
        change_list = [topic_domain.TopicRightsChange({
            'cmd': topic_domain.CMD_REMOVE_MANAGER_ROLE,
            'removed_user_id': user_id
        })]
        save_topic_rights(
            rights, committer.user_id,
            'Removed all assigned topics from %s' % (user_id), change_list)
def assign_role(committer, assignee, new_role, topic_id):
    """Assign or revoke the manager role on a topic.

    Args:
        committer: user-actions object of the user making the change; must
            hold ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY.
        assignee: user-actions object of the user whose role changes; must
            hold ACTION_EDIT_OWNED_TOPIC to be eligible as a manager.
        new_role: str. Either topic_domain.ROLE_MANAGER or
            topic_domain.ROLE_NONE.
        topic_id: str. The id of the topic.

    Raises:
        Exception: if the committer lacks permission, the assignee is not
            eligible, the assignee is already a manager, or new_role is not
            one of the two supported roles.
    """
    committer_id = committer.user_id
    topic_rights = get_topic_rights(topic_id)
    if (role_services.ACTION_MODIFY_ROLES_FOR_ANY_ACTIVITY not in
            committer.actions):
        logging.error(
            'User %s tried to allow user %s to be a %s of topic %s '
            'but was refused permission.' % (
                committer_id, assignee.user_id, new_role, topic_id))
        raise Exception(
            'UnauthorizedUserException: Could not assign new role.')
    assignee_username = user_services.get_username(assignee.user_id)
    if role_services.ACTION_EDIT_OWNED_TOPIC not in assignee.actions:
        raise Exception(
            'The assignee doesn\'t have enough rights to become a manager.')
    # Record the role held before the change, for the commit message/cmds.
    old_role = topic_domain.ROLE_NONE
    if topic_rights.is_manager(assignee.user_id):
        old_role = topic_domain.ROLE_MANAGER
    if new_role == topic_domain.ROLE_MANAGER:
        if topic_rights.is_manager(assignee.user_id):
            raise Exception('This user already is a manager for this topic')
        topic_rights.manager_ids.append(assignee.user_id)
    elif new_role == topic_domain.ROLE_NONE:
        if topic_rights.is_manager(assignee.user_id):
            topic_rights.manager_ids.remove(assignee.user_id)
        else:
            # No-op revoke: the assignee was not a manager to begin with.
            old_role = topic_domain.ROLE_NONE
    else:
        raise Exception('Invalid role: %s' % new_role)
    commit_message = 'Changed role of %s from %s to %s' % (
        assignee_username, old_role, new_role)
    commit_cmds = [topic_domain.TopicRightsChange({
        'cmd': topic_domain.CMD_CHANGE_ROLE,
        'assignee_id': assignee.user_id,
        'old_role': old_role,
        'new_role': new_role
    })]
    save_topic_rights(topic_rights, committer_id, commit_message, commit_cmds)
| true
| true
|
1c47ecd35495ebe0a6ee65d47dce77083635a833
| 36,791
|
py
|
Python
|
stacker/providers/aws/default.py
|
CityGrid/stacker
|
87688453c64ef333c48786000f2f890cbe664633
|
[
"BSD-2-Clause"
] | 1
|
2018-07-17T11:23:47.000Z
|
2018-07-17T11:23:47.000Z
|
stacker/providers/aws/default.py
|
krm731/stacker
|
87688453c64ef333c48786000f2f890cbe664633
|
[
"BSD-2-Clause"
] | null | null | null |
stacker/providers/aws/default.py
|
krm731/stacker
|
87688453c64ef333c48786000f2f890cbe664633
|
[
"BSD-2-Clause"
] | 1
|
2020-02-29T04:49:04.000Z
|
2020-02-29T04:49:04.000Z
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import object
import json
import yaml
import logging
import time
import urllib.parse
import sys
import botocore.exceptions
from botocore.config import Config
from ..base import BaseProvider
from ... import exceptions
from ...ui import ui
from stacker.session_cache import get_session
from ...actions.diff import (
DictValue,
diff_parameters,
format_params_diff as format_diff
)
logger = logging.getLogger(__name__)
# This value controls the maximum number of times a CloudFormation API call
# will be attempted, after being throttled. This value is used in an
# exponential backoff algorithm to determine how long the client should wait
# until attempting a retry:
#
# base * growth_factor ^ (attempts - 1)
#
# A value of 10 here would cause the worst case wait time for the last retry to
# be ~8 mins:
#
# 1 * 2 ^ (10 - 1) = 512 seconds
#
# References:
# https://github.com/boto/botocore/blob/1.6.1/botocore/retryhandler.py#L39-L58
# https://github.com/boto/botocore/blob/1.6.1/botocore/data/_retry.json#L97-L121
MAX_ATTEMPTS = 10  # see the backoff explanation in the comment block above
MAX_TAIL_RETRIES = 5  # retries in tail_stack() while a stack is launching
DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", ]  # sent with create/update
def get_cloudformation_client(session):
    """Build a CloudFormation client with bounded retry/backoff configured."""
    retry_config = Config(retries=dict(max_attempts=MAX_ATTEMPTS))
    return session.client('cloudformation', config=retry_config)
def get_output_dict(stack):
    """Map output keys to values for a CloudFormation stack description.

    Args:
        stack (dict): A stack description as returned by describe_stacks.

    Returns:
        dict: OutputKey -> OutputValue for each stack output (empty when
            the stack has no 'Outputs' entry).
    """
    if 'Outputs' not in stack:
        return {}
    result = {}
    for entry in stack['Outputs']:
        key, value = entry['OutputKey'], entry['OutputValue']
        logger.debug(" %s %s: %s", stack['StackName'], key, value)
        result[key] = value
    return result
def s3_fallback(fqn, template, parameters, tags, method,
                change_set_name=None, service_role=None):
    """Retry a CloudFormation call using the legacy us-east-1 S3 endpoint.

    Invoked when a create/update/changeset call was rejected because the
    TemplateURL pointed at an S3 endpoint the API would not accept. The
    template URL is rewritten to the global ``s3.amazonaws.com`` host and
    ``method`` is called again with regenerated arguments.

    Args:
        fqn (str): Fully qualified stack name.
        template (:class:`stacker.providers.base.Template`): Template object
            whose URL is rewritten for the retry.
        parameters (list): Parameter dictionaries for the stack.
        tags (list): Tag dictionaries for the stack.
        method (callable): CloudFormation client method to retry (e.g.
            create_stack, update_stack or create_change_set).
        change_set_name (str, optional): Name to use when retrying a
            create_change_set call; must be None for plain stack
            create/update retries, since those APIs reject ChangeSetName.
        service_role (str, optional): Service role ARN to pass through.

    Returns:
        dict: The response of the retried CloudFormation call.
    """
    logger.warn("DEPRECATION WARNING: Falling back to legacy "
                "stacker S3 bucket region for templates. See "
                "http://stacker.readthedocs.io/en/latest/config.html#s3-bucket"
                " for more information.")
    # extra line break on purpose to avoid status updates removing URL
    # from view
    logger.warn("\n")
    logger.debug("Modifying the S3 TemplateURL to point to "
                 "us-east-1 endpoint")
    template_url_parsed = urllib.parse.urlparse(template.url)
    template_url_parsed = template_url_parsed._replace(
        netloc="s3.amazonaws.com")
    template_url = urllib.parse.urlunparse(template_url_parsed)
    logger.debug("Using template_url: %s", template_url)
    args = generate_cloudformation_args(
        fqn, parameters, tags, template,
        service_role=service_role,
        # Bug fix: honor the caller-supplied change set name instead of
        # always generating one. Previously a ChangeSetName was injected
        # unconditionally, which made create_stack/update_stack retries
        # send an argument those APIs do not accept.
        change_set_name=change_set_name
    )
    # Bug fix: generate_cloudformation_args read the original template.url,
    # so swap in the rewritten endpoint. Previously the rewritten URL was
    # computed but never used, retrying with the exact URL that had failed.
    if template.url:
        args["TemplateURL"] = template_url
    response = method(**args)
    return response
def get_change_set_name():
    """Return a change set name that satisfies [a-zA-Z][-a-zA-Z0-9]*.

    Uniqueness across change sets comes from the epoch-seconds suffix.
    """
    return 'change-set-%d' % int(time.time())
def requires_replacement(changeset):
    """Filter a changeset down to the changes that require replacement.

    Args:
        changeset (list): List of change dictionaries.

    Returns:
        list: The changes whose Replacement flag is the string "True".
    """
    replacements = []
    for change in changeset:
        if change["ResourceChange"].get("Replacement", False) == "True":
            replacements.append(change)
    return replacements
def ask_for_approval(full_changeset=None, params_diff=None,
                     include_verbose=False):
    """Prompt the user for approval to execute a change set.
    Args:
        full_changeset (list, optional): A list of the full changeset that will
            be output if the user specifies verbose.
        params_diff (list, optional): A list of DictValue detailing the
            differences between two parameters returned by
            :func:`stacker.actions.diff.diff_dictionaries`
        include_verbose (bool, optional): Boolean for whether or not to include
            the verbose option

    Raises:
        CancelExecution: if the user answers anything other than "y"
            (or "v" when verbose output is offered).
    """
    approval_options = ['y', 'n']
    if include_verbose:
        approval_options.append('v')
    approve = ui.ask("Execute the above changes? [{}] ".format(
        '/'.join(approval_options)))
    if include_verbose and approve == "v":
        if params_diff:
            logger.info(
                "Full changeset:\n\n%s\n%s",
                format_params_diff(params_diff),
                yaml.safe_dump(full_changeset),
            )
        else:
            logger.info(
                "Full changeset:\n%s",
                yaml.safe_dump(full_changeset),
            )
        # Ask again after dumping the verbose output. NOTE(review): the
        # recursive call drops all arguments, so "v" is only offered once;
        # confirm that is intended.
        return ask_for_approval()
    elif approve != "y":
        raise exceptions.CancelExecution
def output_summary(fqn, action, changeset, params_diff,
                   replacements_only=False):
    """Log a human-readable summary of a changeset and parameter diff.

    Args:
        fqn (string): fully qualified name of the stack
        action (string): action to include in the log message
        changeset (list): AWS changeset
        params_diff (list): differences between two parameter sets, as
            produced by :func:`stacker.actions.diff.diff_dictionaries`
        replacements_only (bool, optional): when True, omit the
            "Replacements:" heading (only replacements are being listed)
    """
    replacement_lines = []
    change_lines = []
    for entry in changeset:
        resource = entry['ResourceChange']
        line = '- %s %s (%s)' % (
            resource['Action'],
            resource['LogicalResourceId'],
            resource['ResourceType'],
        )
        if resource.get('Replacement') == 'True':
            replacement_lines.append(line)
        else:
            change_lines.append(line)
    body = ''
    if params_diff:
        body += summarize_params_diff(params_diff)
    if replacement_lines:
        if not replacements_only:
            body += 'Replacements:\n'
        body += '\n'.join(replacement_lines)
    if change_lines:
        if body:
            body += '\n'
        body += 'Changes:\n%s' % ('\n'.join(change_lines))
    logger.info('%s %s:\n%s', fqn, action, body)
def format_params_diff(params_diff):
    """ Just a wrapper for stacker.actions.diff.format_params_diff
    for testing purposes.
    """
    # Indirection point: tests can patch this module-level name instead of
    # the implementation imported from stacker.actions.diff.
    return format_diff(params_diff)
def summarize_params_diff(params_diff):
    """Build a short Added/Removed/Modified parameter summary string.

    Returns an empty string when there are no differences; otherwise one
    line per non-empty category, each newline-terminated.
    """
    lines = []
    for label, status in (('Added', DictValue.ADDED),
                          ('Removed', DictValue.REMOVED),
                          ('Modified', DictValue.MODIFIED)):
        keys = [v.key for v in params_diff if v.status() is status]
        if keys:
            lines.append('Parameters %s: %s\n' % (label, ', '.join(keys)))
    return ''.join(lines)
def wait_till_change_set_complete(cfn_client, change_set_id, try_count=25,
                                  sleep_time=.5, max_sleep=3):
    """ Checks state of a changeset, returning when it is in a complete state.
    Since changesets can take a little bit of time to get into a complete
    state, we need to poll it until it does so. This will try to get the
    state `try_count` times, waiting `sleep_time` * 2 seconds between each try
    up to the `max_sleep` number of seconds. If, after that time, the changeset
    is not in a complete state it fails. These default settings will wait a
    little over one minute.
    Args:
        cfn_client (:class:`botocore.client.CloudFormation`): Used to query
            cloudformation.
        change_set_id (str): The unique changeset id to wait for.
        try_count (int): Number of times to try the call.
        sleep_time (int): Time to sleep between attempts.
        max_sleep (int): Max time to sleep during backoff
    Return:
        dict: The response from cloudformation for the describe_change_set
            call.
    Raises:
        ChangesetDidNotStabilize: if the changeset is still incomplete after
            all `try_count` polls.
    """
    complete = False
    response = None
    for i in range(try_count):
        response = cfn_client.describe_change_set(
            ChangeSetName=change_set_id,
        )
        complete = response["Status"] in ("FAILED", "CREATE_COMPLETE")
        if complete:
            break
        if sleep_time == max_sleep:
            # Backoff has topped out; let the user know we are still polling
            # rather than hung.
            logger.debug(
                "Still waiting on changeset for another %s seconds",
                sleep_time
            )
        time.sleep(sleep_time)
        # exponential backoff with max
        sleep_time = min(sleep_time * 2, max_sleep)
    if not complete:
        raise exceptions.ChangesetDidNotStabilize(change_set_id)
    return response
def create_change_set(cfn_client, fqn, template, parameters, tags,
                      change_set_type='UPDATE', replacements_only=False,
                      service_role=None):
    """Create a changeset and wait until it is ready to execute.

    Args:
        cfn_client (:class:`botocore.client.CloudFormation`): Client used
            for the changeset API calls.
        fqn (str): Fully qualified stack name.
        template (:class:`stacker.providers.base.Template`): The template
            object.
        parameters (list): Parameter dictionaries for the stack.
        tags (list): Tag dictionaries for the stack.
        change_set_type (str, optional): "UPDATE" (default) or "CREATE".
        replacements_only (bool, optional): accepted for interface
            compatibility; not used inside this function.
        service_role (str, optional): Service role ARN to use.

    Returns:
        tuple: (changes, change_set_id) where changes is the list of
            changes the changeset would apply.

    Raises:
        StackDidNotChange: if the changeset reports no changes (the
            changeset is deleted first).
        UnhandledChangeSetStatus: for a FAILED status with any other reason.
        UnableToExecuteChangeSet: if the changeset is not AVAILABLE.
    """
    logger.debug("Attempting to create change set of type %s for stack: %s.",
                 change_set_type,
                 fqn)
    args = generate_cloudformation_args(
        fqn, parameters, tags, template,
        change_set_type=change_set_type,
        service_role=service_role,
        change_set_name=get_change_set_name()
    )
    try:
        response = cfn_client.create_change_set(**args)
    except botocore.exceptions.ClientError as e:
        # This exact message indicates a template in the legacy S3 bucket
        # region; retry through the us-east-1 fallback path.
        if e.response['Error']['Message'] == ('TemplateURL must reference '
                                              'a valid S3 object to which '
                                              'you have access.'):
            response = s3_fallback(fqn, template, parameters,
                                   tags, cfn_client.create_change_set,
                                   get_change_set_name(),
                                   service_role)
        else:
            raise
    change_set_id = response["Id"]
    response = wait_till_change_set_complete(
        cfn_client, change_set_id
    )
    status = response["Status"]
    if status == "FAILED":
        status_reason = response["StatusReason"]
        if ("didn't contain changes" in response["StatusReason"] or
                "No updates are to be performed" in response["StatusReason"]):
            # A no-op changeset: clean it up and signal "nothing to do".
            logger.debug(
                "Stack %s did not change, not updating and removing "
                "changeset.",
                fqn,
            )
            cfn_client.delete_change_set(ChangeSetName=change_set_id)
            raise exceptions.StackDidNotChange()
        logger.warn(
            "Got strange status, '%s' for changeset '%s'. Not deleting for "
            "further investigation - you will need to delete the changeset "
            "manually.",
            status, change_set_id
        )
        raise exceptions.UnhandledChangeSetStatus(
            fqn, change_set_id, status, status_reason
        )
    execution_status = response["ExecutionStatus"]
    if execution_status != "AVAILABLE":
        raise exceptions.UnableToExecuteChangeSet(fqn,
                                                  change_set_id,
                                                  execution_status)
    changes = response["Changes"]
    return changes, change_set_id
def check_tags_contain(actual, expected):
    """Return True when every expected tag is present (same value) in actual.

    Extra keys in `actual` that are absent from `expected` are ignored.

    Args:
        actual (list): Tags to verify, each a dict with `Key` and `Value`.
        expected (list): Tags that must all appear in `actual` (same format).
    """
    def as_pairs(tags):
        return {(tag["Key"], tag["Value"]) for tag in tags}
    return as_pairs(expected) <= as_pairs(actual)
def generate_cloudformation_args(stack_name, parameters, tags, template,
                                 capabilities=DEFAULT_CAPABILITIES,
                                 change_set_type=None,
                                 service_role=None,
                                 stack_policy=None,
                                 change_set_name=None):
    """Assemble the keyword arguments shared by CloudFormation stack calls.

    Used for create_stack / update_stack / create_change_set calls.

    Args:
        stack_name (str): The fully qualified stack name in Cloudformation.
        parameters (list): Parameter dictionaries for the stack.
        tags (list): Tag dictionaries for the stack.
        template (:class:`stacker.provider.base.Template`): The template
            object; its URL is preferred over its inline body.
        capabilities (list, optional): Capabilities to acknowledge.
        change_set_type (str, optional): Change set type, only for
            create_change_set.
        service_role (str, optional): Service role ARN, if any.
        stack_policy (optional): Stack policy object, ignored for changesets.
        change_set_name (str, optional): Change set name, only for
            create_change_set.

    Returns:
        dict: Keyword arguments for the Cloudformation API call.
    """
    args = {
        "StackName": stack_name,
        "Parameters": parameters,
        "Tags": tags,
        "Capabilities": capabilities,
    }
    for key, value in (("RoleARN", service_role),
                       ("ChangeSetName", change_set_name),
                       ("ChangeSetType", change_set_type),
                       ("TemplateURL", template.url)):
        if value:
            args[key] = value
    if not template.url:
        args["TemplateBody"] = template.body
    # ChangeSets don't support stack policies, so only include one for the
    # plain create/update calls (no change_set_name given).
    if not change_set_name:
        args.update(generate_stack_policy_args(stack_policy))
    return args
def generate_stack_policy_args(stack_policy=None):
    """Return StackPolicy kwargs for a create/update call ({} when absent)."""
    if not stack_policy:
        return {}
    logger.debug("Stack has a stack policy")
    if stack_policy.url:
        # Uploading stack policies to S3 is not supported by stacker, so
        # URL-based policies cannot be handled yet; implementing that would
        # mean passing StackPolicyURL=stack_policy.url here.
        raise NotImplementedError
    return {"StackPolicyBody": stack_policy.body}
class ProviderBuilder(object):
    """Builds AWS Provider instances bound to a region and profile."""

    def __init__(self, region=None, **kwargs):
        # Extra kwargs are forwarded verbatim to every Provider built.
        self.region = region
        self.kwargs = kwargs

    def build(self, region=None, profile=None):
        """Return a Provider for `region` (default: the builder's region)."""
        effective_region = region or self.region
        session = get_session(region=effective_region, profile=profile)
        return Provider(session, region=effective_region, **self.kwargs)
class Provider(BaseProvider):
    """AWS CloudFormation Provider"""
    # Terminal status indicating the stack no longer exists.
    DELETED_STATUS = "DELETE_COMPLETE"
    # Statuses during which no new operation may be started on the stack.
    IN_PROGRESS_STATUSES = (
        "CREATE_IN_PROGRESS",
        "UPDATE_IN_PROGRESS",
        "DELETE_IN_PROGRESS",
        "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS",
    )
    ROLLING_BACK_STATUSES = (
        "ROLLBACK_IN_PROGRESS",
        "UPDATE_ROLLBACK_IN_PROGRESS"
    )
    FAILED_STATUSES = (
        "CREATE_FAILED",
        "ROLLBACK_FAILED",
        "ROLLBACK_COMPLETE",
        "DELETE_FAILED",
        "UPDATE_ROLLBACK_FAILED",
        # Note: UPDATE_ROLLBACK_COMPLETE is in both the FAILED and COMPLETE
        # sets, because we need to wait for it when a rollback is triggered,
        # but still mark the stack as failed.
        "UPDATE_ROLLBACK_COMPLETE",
    )
    COMPLETE_STATUSES = (
        "CREATE_COMPLETE",
        "DELETE_COMPLETE",
        "UPDATE_COMPLETE",
        "UPDATE_ROLLBACK_COMPLETE",
    )
    # Failed states from which the stack may safely be deleted and
    # re-created (see prepare_stack_for_update).
    RECREATION_STATUSES = (
        "CREATE_FAILED",
        "ROLLBACK_FAILED",
        "ROLLBACK_COMPLETE",
    )
    def __init__(self, session, region=None, interactive=False,
                 replacements_only=False, recreate_failed=False,
                 service_role=None, **kwargs):
        # Per-stack cache of outputs, filled lazily by get_outputs().
        self._outputs = {}
        self.region = region
        self.cloudformation = get_cloudformation_client(session)
        self.interactive = interactive
        # replacements only is only used in interactive mode
        self.replacements_only = interactive and replacements_only
        # Interactive mode can always re-create, because the user is asked
        # for confirmation first.
        self.recreate_failed = interactive or recreate_failed
        self.service_role = service_role
    def get_stack(self, stack_name, **kwargs):
        """Return the describe_stacks dict for the named stack.

        Raises:
            StackDoesNotExist: if CloudFormation reports the stack missing.
        """
        try:
            return self.cloudformation.describe_stacks(
                StackName=stack_name)['Stacks'][0]
        except botocore.exceptions.ClientError as e:
            # Only translate the "missing stack" error; re-raise the rest.
            if "does not exist" not in str(e):
                raise
            raise exceptions.StackDoesNotExist(stack_name)
    def get_stack_status(self, stack, **kwargs):
        """Return the raw StackStatus string from a stack description."""
        return stack['StackStatus']
    def is_stack_completed(self, stack, **kwargs):
        """Whether the stack is in a terminal COMPLETE state."""
        return self.get_stack_status(stack) in self.COMPLETE_STATUSES
    def is_stack_in_progress(self, stack, **kwargs):
        """Whether an operation is currently running on the stack."""
        return self.get_stack_status(stack) in self.IN_PROGRESS_STATUSES
    def is_stack_destroyed(self, stack, **kwargs):
        """Whether the stack has been fully deleted."""
        return self.get_stack_status(stack) == self.DELETED_STATUS
    def is_stack_recreatable(self, stack, **kwargs):
        """Whether the stack failed in a way that allows delete+re-create."""
        return self.get_stack_status(stack) in self.RECREATION_STATUSES
    def is_stack_rolling_back(self, stack, **kwargs):
        """Whether the stack is currently rolling back."""
        return self.get_stack_status(stack) in self.ROLLING_BACK_STATUSES
    def is_stack_failed(self, stack, **kwargs):
        """Whether the stack is in a FAILED state."""
        return self.get_stack_status(stack) in self.FAILED_STATUSES
    def tail_stack(self, stack, cancel, retries=0, **kwargs):
        """Tail a stack's events, retrying briefly if it does not exist yet.

        Args:
            stack: stacker stack object (must provide `fqn`).
            cancel: Event-like object used to stop tailing.
            retries (int): internal recursion counter, bounded by
                MAX_TAIL_RETRIES.
        """
        def log_func(e):
            event_args = [e['ResourceStatus'], e['ResourceType'],
                          e.get('ResourceStatusReason', None)]
            # filter out any values that are empty
            event_args = [arg for arg in event_args if arg]
            template = " ".join(["[%s]"] + ["%s" for _ in event_args])
            logger.info(template, *([stack.fqn] + event_args))
        if not retries:
            logger.info("Tailing stack: %s", stack.fqn)
        try:
            self.tail(stack.fqn,
                      cancel=cancel,
                      log_func=log_func,
                      include_initial=False)
        except botocore.exceptions.ClientError as e:
            if "does not exist" in str(e) and retries < MAX_TAIL_RETRIES:
                # stack might be in the process of launching, wait for a second
                # and try again
                time.sleep(1)
                self.tail_stack(stack, cancel, retries=retries + 1, **kwargs)
            else:
                raise
    @staticmethod
    def _tail_print(e):
        # Default log_func for tail(): print status, type and event id.
        print("%s %s %s" % (e['ResourceStatus'],
                            e['ResourceType'],
                            e['EventId']))
    def get_events(self, stackname):
        """Get the events in batches and return in chronological order"""
        next_token = None
        event_list = []
        while 1:
            if next_token is not None:
                events = self.cloudformation.describe_stack_events(
                    StackName=stackname, NextToken=next_token
                )
            else:
                events = self.cloudformation.describe_stack_events(
                    StackName=stackname
                )
            event_list.append(events['StackEvents'])
            next_token = events.get('NextToken', None)
            if next_token is None:
                break
            # Throttle consecutive paginated describe calls.
            time.sleep(1)
        # Pages arrive newest-first; flatten then reverse into time order.
        return reversed(sum(event_list, []))
    def tail(self, stack_name, cancel, log_func=_tail_print, sleep_time=5,
             include_initial=True):
        """Show and then tail the event log"""
        # NOTE(review): the default log_func binds the staticmethod object
        # itself; confirm it is callable on every supported Python version
        # (bare staticmethod objects are only directly callable from 3.10).
        # First dump the full list of events in chronological order and keep
        # track of the events we've seen already
        seen = set()
        initial_events = self.get_events(stack_name)
        for e in initial_events:
            if include_initial:
                log_func(e)
            seen.add(e['EventId'])
        # Now keep looping through and dump the new events
        while 1:
            events = self.get_events(stack_name)
            for e in events:
                if e['EventId'] not in seen:
                    log_func(e)
                    seen.add(e['EventId'])
            # The wait doubles as the poll interval and lets callers cancel
            # the tail promptly.
            if cancel.wait(sleep_time):
                return
    def destroy_stack(self, stack, **kwargs):
        """Kick off deletion of the stack (returns without waiting)."""
        logger.debug("Destroying stack: %s" % (self.get_stack_name(stack)))
        args = {"StackName": self.get_stack_name(stack)}
        if self.service_role:
            args["RoleARN"] = self.service_role
        self.cloudformation.delete_stack(**args)
        return True
    def create_stack(self, fqn, template, parameters, tags,
                     force_change_set=False, stack_policy=None,
                     **kwargs):
        """Create a new Cloudformation stack.
        Args:
            fqn (str): The fully qualified name of the Cloudformation stack.
            template (:class:`stacker.providers.base.Template`): A Template
                object to use when creating the stack.
            parameters (list): A list of dictionaries that defines the
                parameter list to be applied to the Cloudformation stack.
            tags (list): A list of dictionaries that defines the tags
                that should be applied to the Cloudformation stack.
            force_change_set (bool): Whether or not to force change set use.
        """
        logger.debug("Attempting to create stack %s:.", fqn)
        logger.debug(" parameters: %s", parameters)
        logger.debug(" tags: %s", tags)
        if template.url:
            logger.debug(" template_url: %s", template.url)
        else:
            logger.debug(" no template url, uploading template "
                         "directly.")
        if force_change_set:
            # Required for templates with a Transform (e.g. SAM), which the
            # plain CreateStack API cannot handle.
            logger.debug("force_change_set set to True, creating stack with "
                         "changeset.")
            _changes, change_set_id = create_change_set(
                self.cloudformation, fqn, template, parameters, tags,
                'CREATE', service_role=self.service_role, **kwargs
            )
            self.cloudformation.execute_change_set(
                ChangeSetName=change_set_id,
            )
        else:
            args = generate_cloudformation_args(
                fqn, parameters, tags, template,
                service_role=self.service_role,
                stack_policy=stack_policy,
            )
            try:
                self.cloudformation.create_stack(**args)
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Message'] == ('TemplateURL must '
                                                      'reference a valid S3 '
                                                      'object to which you '
                                                      'have access.'):
                    # NOTE(review): self.service_role is passed positionally
                    # into s3_fallback's change_set_name parameter here;
                    # confirm it should be service_role=self.service_role.
                    s3_fallback(fqn, template, parameters, tags,
                                self.cloudformation.create_stack,
                                self.service_role)
                else:
                    raise
    def select_update_method(self, force_interactive, force_change_set):
        """Select the correct update method when updating a stack.
        Args:
            force_interactive (str): Whether or not to force interactive mode
                no matter what mode the provider is in.
            force_change_set (bool): Whether or not to force change set use.
        Returns:
            function: The correct object method to use when updating.
        """
        if self.interactive or force_interactive:
            return self.interactive_update_stack
        elif force_change_set:
            return self.noninteractive_changeset_update
        else:
            return self.default_update_stack
    def prepare_stack_for_update(self, stack, tags):
        """Prepare a stack for updating
        It may involve deleting the stack if is has failed it's initial
        creation. The deletion is only allowed if:
        - The stack contains all the tags configured in the current context;
        - The stack is in one of the statuses considered safe to re-create
        - ``recreate_failed`` is enabled, due to either being explicitly
          enabled by the user, or because interactive mode is on.
        Args:
            stack (dict): a stack object returned from get_stack
            tags (list): list of expected tags that must be present in the
                stack if it must be re-created
        Returns:
            bool: True if the stack can be updated, False if it must be
                re-created
        """
        if self.is_stack_destroyed(stack):
            # Nothing to delete; the caller should create from scratch.
            return False
        elif self.is_stack_completed(stack):
            return True
        stack_name = self.get_stack_name(stack)
        stack_status = self.get_stack_status(stack)
        if self.is_stack_in_progress(stack):
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Update already in-progress')
        if not self.is_stack_recreatable(stack):
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Unsupported state for re-creation')
        if not self.recreate_failed:
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Stack re-creation is disabled. Run stacker again with the '
                '--recreate-failed option to force it to be deleted and '
                'created from scratch.')
        stack_tags = self.get_stack_tags(stack)
        if not check_tags_contain(stack_tags, tags):
            # Refuse to delete stacks that don't carry the expected tags —
            # they were likely not created by stacker.
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Tags differ from current configuration, possibly not created '
                'with stacker')
        if self.interactive:
            sys.stdout.write(
                'The \"%s\" stack is in a failed state (%s).\n'
                'It cannot be updated, but it can be deleted and re-created.\n'
                'All its current resources will IRREVERSIBLY DESTROYED.\n'
                'Proceed carefully!\n\n' % (stack_name, stack_status))
            sys.stdout.flush()
            ask_for_approval(include_verbose=False)
        logger.warn('Destroying stack \"%s\" for re-creation', stack_name)
        self.destroy_stack(stack)
        return False
    def update_stack(self, fqn, template, old_parameters, parameters, tags,
                     force_interactive=False, force_change_set=False,
                     stack_policy=None, **kwargs):
        """Update a Cloudformation stack.
        Args:
            fqn (str): The fully qualified name of the Cloudformation stack.
            template (:class:`stacker.providers.base.Template`): A Template
                object to use when updating the stack.
            old_parameters (list): A list of dictionaries that defines the
                parameter list on the existing Cloudformation stack.
            parameters (list): A list of dictionaries that defines the
                parameter list to be applied to the Cloudformation stack.
            tags (list): A list of dictionaries that defines the tags
                that should be applied to the Cloudformation stack.
            force_interactive (bool): A flag that indicates whether the update
                should be interactive. If set to True, interactive mode will
                be used no matter if the provider is in interactive mode or
                not. False will follow the behavior of the provider.
            force_change_set (bool): A flag that indicates whether the update
                must be executed with a change set.
        """
        logger.debug("Attempting to update stack %s:", fqn)
        logger.debug(" parameters: %s", parameters)
        logger.debug(" tags: %s", tags)
        if template.url:
            logger.debug(" template_url: %s", template.url)
        else:
            logger.debug(" no template url, uploading template directly.")
        update_method = self.select_update_method(force_interactive,
                                                  force_change_set)
        return update_method(fqn, template, old_parameters, parameters, tags,
                             stack_policy=stack_policy, **kwargs)
    def interactive_update_stack(self, fqn, template, old_parameters,
                                 parameters, tags, stack_policy=None,
                                 **kwargs):
        """Update a Cloudformation stack in interactive mode.
        Args:
            fqn (str): The fully qualified name of the Cloudformation stack.
            template (:class:`stacker.providers.base.Template`): A Template
                object to use when updating the stack.
            old_parameters (list): A list of dictionaries that defines the
                parameter list on the existing Cloudformation stack.
            parameters (list): A list of dictionaries that defines the
                parameter list to be applied to the Cloudformation stack.
            tags (list): A list of dictionaries that defines the tags
                that should be applied to the Cloudformation stack.
        """
        logger.debug("Using interactive provider mode for %s.", fqn)
        changes, change_set_id = create_change_set(
            self.cloudformation, fqn, template, parameters, tags,
            'UPDATE', service_role=self.service_role, **kwargs
        )
        params_diff = diff_parameters(
            self.params_as_dict(old_parameters),
            self.params_as_dict(parameters))
        action = "replacements" if self.replacements_only else "changes"
        full_changeset = changes
        if self.replacements_only:
            changes = requires_replacement(changes)
        if changes or params_diff:
            # Hold the UI lock so status updates don't interleave with the
            # summary/approval prompt.
            ui.lock()
            try:
                output_summary(fqn, action, changes, params_diff,
                               replacements_only=self.replacements_only)
                ask_for_approval(
                    full_changeset=full_changeset,
                    params_diff=params_diff,
                    include_verbose=True,
                )
            finally:
                ui.unlock()
        # ChangeSets don't support specifying a stack policy inline, like
        # CreateStack/UpdateStack, so we just SetStackPolicy if there is one.
        if stack_policy:
            kwargs = generate_stack_policy_args(stack_policy)
            kwargs["StackName"] = fqn
            self.cloudformation.set_stack_policy(**kwargs)
        self.cloudformation.execute_change_set(
            ChangeSetName=change_set_id,
        )
    def noninteractive_changeset_update(self, fqn, template, old_parameters,
                                        parameters, tags, **kwargs):
        """Update a Cloudformation stack using a change set.
        This is required for stacks with a defined Transform (i.e. SAM), as the
        default update_stack API cannot be used with them.
        Args:
            fqn (str): The fully qualified name of the Cloudformation stack.
            template (:class:`stacker.providers.base.Template`): A Template
                object to use when updating the stack.
            old_parameters (list): A list of dictionaries that defines the
                parameter list on the existing Cloudformation stack.
            parameters (list): A list of dictionaries that defines the
                parameter list to be applied to the Cloudformation stack.
            tags (list): A list of dictionaries that defines the tags
                that should be applied to the Cloudformation stack.
        """
        logger.debug("Using noninterative changeset provider mode "
                     "for %s.", fqn)
        _changes, change_set_id = create_change_set(
            self.cloudformation, fqn, template, parameters, tags,
            'UPDATE', service_role=self.service_role, **kwargs
        )
        self.cloudformation.execute_change_set(
            ChangeSetName=change_set_id,
        )
    def default_update_stack(self, fqn, template, old_parameters, parameters,
                             tags, stack_policy=None, **kwargs):
        """Update a Cloudformation stack in default mode.
        Args:
            fqn (str): The fully qualified name of the Cloudformation stack.
            template (:class:`stacker.providers.base.Template`): A Template
                object to use when updating the stack.
            old_parameters (list): A list of dictionaries that defines the
                parameter list on the existing Cloudformation stack.
            parameters (list): A list of dictionaries that defines the
                parameter list to be applied to the Cloudformation stack.
            tags (list): A list of dictionaries that defines the tags
                that should be applied to the Cloudformation stack.
        """
        logger.debug("Using default provider mode for %s.", fqn)
        args = generate_cloudformation_args(
            fqn, parameters, tags, template,
            service_role=self.service_role,
            stack_policy=stack_policy,
        )
        try:
            self.cloudformation.update_stack(**args)
        except botocore.exceptions.ClientError as e:
            if "No updates are to be performed." in str(e):
                logger.debug(
                    "Stack %s did not change, not updating.",
                    fqn,
                )
                raise exceptions.StackDidNotChange
            elif e.response['Error']['Message'] == ('TemplateURL must '
                                                    'reference a valid '
                                                    'S3 object to which '
                                                    'you have access.'):
                # NOTE(review): self.service_role is passed positionally
                # into s3_fallback's change_set_name parameter here; confirm
                # it should be service_role=self.service_role.
                s3_fallback(fqn, template, parameters, tags,
                            self.cloudformation.update_stack,
                            self.service_role)
            else:
                raise
    def get_stack_name(self, stack, **kwargs):
        """Return the StackName from a stack description dict."""
        return stack['StackName']
    def get_stack_tags(self, stack, **kwargs):
        """Return the Tags list from a stack description dict."""
        return stack['Tags']
    def get_outputs(self, stack_name, *args, **kwargs):
        """Return (and cache) the output dict for the named stack."""
        if stack_name not in self._outputs:
            stack = self.get_stack(stack_name)
            self._outputs[stack_name] = get_output_dict(stack)
        return self._outputs[stack_name]
    def get_output_dict(self, stack):
        # Thin delegate to the module-level helper of the same name.
        return get_output_dict(stack)
    def get_stack_info(self, stack):
        """ Get the template and parameters of the stack currently in AWS
        Returns [ template, parameters ]
        """
        stack_name = stack['StackId']
        try:
            template = self.cloudformation.get_template(
                StackName=stack_name)['TemplateBody']
        except botocore.exceptions.ClientError as e:
            if "does not exist" not in str(e):
                raise
            raise exceptions.StackDoesNotExist(stack_name)
        parameters = self.params_as_dict(stack.get('Parameters', []))
        # NOTE(review): TemplateBody may already be a str (YAML templates);
        # json.dumps would then double-encode it. Confirm callers expect a
        # JSON string in both cases.
        return [json.dumps(template), parameters]
    @staticmethod
    def params_as_dict(parameters_list):
        """Convert a CloudFormation Parameters list into a plain dict."""
        parameters = dict()
        for p in parameters_list:
            parameters[p['ParameterKey']] = p['ParameterValue']
        return parameters
| 37.850823
| 80
| 0.60561
|
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import object
import json
import yaml
import logging
import time
import urllib.parse
import sys
import botocore.exceptions
from botocore.config import Config
from ..base import BaseProvider
from ... import exceptions
from ...ui import ui
from stacker.session_cache import get_session
from ...actions.diff import (
DictValue,
diff_parameters,
format_params_diff as format_diff
)
logger = logging.getLogger(__name__)
10
MAX_TAIL_RETRIES = 5
DEFAULT_CAPABILITIES = ["CAPABILITY_NAMED_IAM", ]
def get_cloudformation_client(session):
    """Return a CloudFormation client from *session* with retry config.

    NOTE(review): ``MAX_ATTEMPTS`` is expected to be a module-level
    constant; its definition appears mangled in this copy of the file —
    confirm it is defined (upstream uses ``MAX_ATTEMPTS = 10``).
    """
    config = Config(
        retries=dict(
            max_attempts=MAX_ATTEMPTS
        )
    )
    return session.client('cloudformation', config=config)
def get_output_dict(stack):
    """Map a stack's Outputs list to an ``{OutputKey: OutputValue}`` dict.

    Stacks without an ``Outputs`` key yield an empty dict.
    """
    result = {}
    for entry in stack.get('Outputs', ()):
        logger.debug("    %s %s: %s", stack['StackName'], entry['OutputKey'],
                     entry['OutputValue'])
        result[entry['OutputKey']] = entry['OutputValue']
    return result
def s3_fallback(fqn, template, parameters, tags, method,
                change_set_name=None, service_role=None):
    """Retry a CloudFormation call against the legacy us-east-1 S3 endpoint.

    Deprecated compatibility path for templates stored in the old stacker
    S3 bucket region; *method* is the boto3 call to retry (create/update
    stack or create_change_set).
    """
    logger.warn("DEPRECATION WARNING: Falling back to legacy "
                "stacker S3 bucket region for templates. See "
                "http://stacker.readthedocs.io/en/latest/config.html#s3-bucket"
                " for more information.")
    logger.warn("\n")
    logger.debug("Modifying the S3 TemplateURL to point to "
                 "us-east-1 endpoint")
    template_url = template.url
    template_url_parsed = urllib.parse.urlparse(template_url)
    template_url_parsed = template_url_parsed._replace(
        netloc="s3.amazonaws.com")
    template_url = urllib.parse.urlunparse(template_url_parsed)
    # NOTE(review): the rewritten template_url is only logged below but
    # never attached to *template* before the retry — the retried call
    # appears to reuse the original URL; confirm against upstream stacker.
    logger.debug("Using template_url: %s", template_url)
    # NOTE(review): the change_set_name parameter is accepted but ignored;
    # a fresh name is generated here — confirm that is intended.
    args = generate_cloudformation_args(
        fqn, parameters, tags, template,
        service_role=service_role,
        change_set_name=get_change_set_name()
    )
    response = method(**args)
    return response
def get_change_set_name():
    """Generate a unique-ish change set name from the current epoch time."""
    timestamp = int(time.time())
    return 'change-set-{}'.format(timestamp)
def requires_replacement(changeset):
    """Return the changes whose Replacement flag is the string ``"True"``."""
    replacements = []
    for change in changeset:
        if change["ResourceChange"].get("Replacement", False) == "True":
            replacements.append(change)
    return replacements
def ask_for_approval(full_changeset=None, params_diff=None,
                     include_verbose=False):
    """Interactively prompt the user to approve pending changes.

    Raises ``CancelExecution`` unless the user answers ``y``.  When
    *include_verbose* is set, answering ``v`` dumps the full changeset
    (and parameter diff, if any) and re-prompts.
    """
    approval_options = ['y', 'n']
    if include_verbose:
        approval_options.append('v')
    approve = ui.ask("Execute the above changes? [{}] ".format(
        '/'.join(approval_options)))
    if include_verbose and approve == "v":
        if params_diff:
            logger.info(
                "Full changeset:\n\n%s\n%s",
                format_params_diff(params_diff),
                yaml.safe_dump(full_changeset),
            )
        else:
            logger.info(
                "Full changeset:\n%s",
                yaml.safe_dump(full_changeset),
            )
        # NOTE(review): the recursive re-prompt drops all arguments, so the
        # second prompt only offers y/n — presumably intended (the verbose
        # output was already shown), but confirm.
        return ask_for_approval()
    elif approve != "y":
        raise exceptions.CancelExecution
def output_summary(fqn, action, changeset, params_diff,
                   replacements_only=False):
    """Log a one-screen summary of a changeset for stack *fqn*.

    Splits changes into replacements and plain changes; with
    *replacements_only* the "Replacements:" heading is suppressed because
    only replacements are being shown.
    """
    replacements = []
    changes = []
    for change in changeset:
        resource = change['ResourceChange']
        # CloudFormation reports Replacement as the string "True"/"False".
        replacement = resource.get('Replacement') == 'True'
        summary = '- %s %s (%s)' % (
            resource['Action'],
            resource['LogicalResourceId'],
            resource['ResourceType'],
        )
        if replacement:
            replacements.append(summary)
        else:
            changes.append(summary)
    summary = ''
    if params_diff:
        summary += summarize_params_diff(params_diff)
    if replacements:
        if not replacements_only:
            summary += 'Replacements:\n'
        summary += '\n'.join(replacements)
    if changes:
        if summary:
            summary += '\n'
        summary += 'Changes:\n%s' % ('\n'.join(changes))
    logger.info('%s %s:\n%s', fqn, action, summary)
def format_params_diff(params_diff):
    """Delegate to ``stacker.actions.diff.format_params_diff`` (imported as ``format_diff``)."""
    return format_diff(params_diff)
def summarize_params_diff(params_diff):
    """Build a short summary of added/removed/modified stack parameters.

    *params_diff* is a list of ``DictValue`` objects; returns '' when
    nothing changed.
    """
    lines = []
    for label, wanted_status in (('Added', DictValue.ADDED),
                                 ('Removed', DictValue.REMOVED),
                                 ('Modified', DictValue.MODIFIED)):
        keys = [entry.key for entry in params_diff
                if entry.status() is wanted_status]
        if keys:
            lines.append('Parameters %s: %s\n' % (label, ', '.join(keys)))
    return ''.join(lines)
def wait_till_change_set_complete(cfn_client, change_set_id, try_count=25,
                                  sleep_time=.5, max_sleep=3):
    """Poll a change set until it reaches a terminal status.

    Sleeps with exponential backoff (doubling, capped at *max_sleep*
    seconds) between attempts and raises ``ChangesetDidNotStabilize``
    after *try_count* polls without a FAILED/CREATE_COMPLETE status.
    """
    response = None
    for _attempt in range(try_count):
        response = cfn_client.describe_change_set(
            ChangeSetName=change_set_id,
        )
        if response["Status"] in ("FAILED", "CREATE_COMPLETE"):
            return response
        if sleep_time == max_sleep:
            # Only log once backoff has reached its ceiling.
            logger.debug(
                "Still waiting on changeset for another %s seconds",
                sleep_time
            )
        time.sleep(sleep_time)
        sleep_time = min(sleep_time * 2, max_sleep)
    raise exceptions.ChangesetDidNotStabilize(change_set_id)
def create_change_set(cfn_client, fqn, template, parameters, tags,
                      change_set_type='UPDATE', replacements_only=False,
                      service_role=None):
    """Create a change set for stack *fqn* and wait for it to stabilize.

    Returns ``(changes, change_set_id)``.  Raises ``StackDidNotChange``
    when the change set is empty, ``UnhandledChangeSetStatus`` on an
    unrecognized failure, and ``UnableToExecuteChangeSet`` when the set
    cannot be executed.

    NOTE(review): the *replacements_only* parameter is accepted but never
    used here — confirm whether it should filter the returned changes.
    """
    logger.debug("Attempting to create change set of type %s for stack: %s.",
                 change_set_type,
                 fqn)
    args = generate_cloudformation_args(
        fqn, parameters, tags, template,
        change_set_type=change_set_type,
        service_role=service_role,
        change_set_name=get_change_set_name()
    )
    try:
        response = cfn_client.create_change_set(**args)
    except botocore.exceptions.ClientError as e:
        # Legacy S3 bucket region fallback (see s3_fallback).
        if e.response['Error']['Message'] == ('TemplateURL must reference '
                                              'a valid S3 object to which '
                                              'you have access.'):
            response = s3_fallback(fqn, template, parameters,
                                   tags, cfn_client.create_change_set,
                                   get_change_set_name(),
                                   service_role)
        else:
            raise
    change_set_id = response["Id"]
    response = wait_till_change_set_complete(
        cfn_client, change_set_id
    )
    status = response["Status"]
    if status == "FAILED":
        status_reason = response["StatusReason"]
        # An "empty" change set is reported as FAILED; treat it as
        # "nothing to do" and clean up the change set.
        if ("didn't contain changes" in response["StatusReason"] or
                "No updates are to be performed" in response["StatusReason"]):
            logger.debug(
                "Stack %s did not change, not updating and removing "
                "changeset.",
                fqn,
            )
            cfn_client.delete_change_set(ChangeSetName=change_set_id)
            raise exceptions.StackDidNotChange()
        # Unknown failure: keep the change set around for investigation.
        logger.warn(
            "Got strange status, '%s' for changeset '%s'. Not deleting for "
            "further investigation - you will need to delete the changeset "
            "manually.",
            status, change_set_id
        )
        raise exceptions.UnhandledChangeSetStatus(
            fqn, change_set_id, status, status_reason
        )
    execution_status = response["ExecutionStatus"]
    if execution_status != "AVAILABLE":
        raise exceptions.UnableToExecuteChangeSet(fqn,
                                                  change_set_id,
                                                  execution_status)
    changes = response["Changes"]
    return changes, change_set_id
def check_tags_contain(actual, expected):
    """True when every {Key, Value} tag in *expected* appears in *actual*."""
    wanted = {(tag["Key"], tag["Value"]) for tag in expected}
    present = {(tag["Key"], tag["Value"]) for tag in actual}
    return wanted <= present
def generate_cloudformation_args(stack_name, parameters, tags, template,
                                 capabilities=DEFAULT_CAPABILITIES,
                                 change_set_type=None,
                                 service_role=None,
                                 stack_policy=None,
                                 change_set_name=None):
    """Assemble the keyword arguments shared by the create/update/changeset
    CloudFormation API calls."""
    args = {
        "StackName": stack_name,
        "Parameters": parameters,
        "Tags": tags,
        "Capabilities": capabilities,
    }
    optional = (
        ("RoleARN", service_role),
        ("ChangeSetName", change_set_name),
        ("ChangeSetType", change_set_type),
    )
    for key, value in optional:
        if value:
            args[key] = value
    if template.url:
        args["TemplateURL"] = template.url
    else:
        args["TemplateBody"] = template.body
    # CreateChangeSet does not accept a stack policy inline, so only attach
    # one for direct create/update calls (i.e. when no change set name is
    # given).
    if not change_set_name:
        args.update(generate_stack_policy_args(stack_policy))
    return args
def generate_stack_policy_args(stack_policy=None):
    """Return StackPolicy kwargs for create/update calls; empty when no
    policy is configured."""
    if not stack_policy:
        return {}
    logger.debug("Stack has a stack policy")
    if stack_policy.url:
        # Uploading stack policies to S3 is not supported, so a URL-based
        # policy cannot be honored here.
        raise NotImplementedError
    return {"StackPolicyBody": stack_policy.body}
class ProviderBuilder(object):
    """Factory that builds Provider instances bound to a region/profile."""

    def __init__(self, region=None, **kwargs):
        self.region = region
        # Extra kwargs are forwarded verbatim to every Provider built.
        self.kwargs = kwargs

    def build(self, region=None, profile=None):
        """Create a Provider, defaulting to this builder's region."""
        region = region or self.region
        session = get_session(region=region, profile=profile)
        return Provider(session, region=region, **self.kwargs)
class Provider(BaseProvider):
    """AWS CloudFormation provider.

    Wraps the boto3 CloudFormation client with the operations stacker's
    actions need: stack lookup, create/update/destroy, change sets,
    event tailing, and output retrieval.
    """

    # Terminal status meaning the stack no longer exists.
    DELETED_STATUS = "DELETE_COMPLETE"
    IN_PROGRESS_STATUSES = (
        "CREATE_IN_PROGRESS",
        "UPDATE_IN_PROGRESS",
        "DELETE_IN_PROGRESS",
        "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS",
    )
    ROLLING_BACK_STATUSES = (
        "ROLLBACK_IN_PROGRESS",
        "UPDATE_ROLLBACK_IN_PROGRESS"
    )
    # NOTE(review): "UPDATE_ROLLBACK_COMPLETE" appears in both
    # FAILED_STATUSES and COMPLETE_STATUSES — confirm that is intended.
    FAILED_STATUSES = (
        "CREATE_FAILED",
        "ROLLBACK_FAILED",
        "ROLLBACK_COMPLETE",
        "DELETE_FAILED",
        "UPDATE_ROLLBACK_FAILED",
        "UPDATE_ROLLBACK_COMPLETE",
    )
    COMPLETE_STATUSES = (
        "CREATE_COMPLETE",
        "DELETE_COMPLETE",
        "UPDATE_COMPLETE",
        "UPDATE_ROLLBACK_COMPLETE",
    )
    # States from which a stack can only be fixed by delete + re-create.
    RECREATION_STATUSES = (
        "CREATE_FAILED",
        "ROLLBACK_FAILED",
        "ROLLBACK_COMPLETE",
    )
    def __init__(self, session, region=None, interactive=False,
                 replacements_only=False, recreate_failed=False,
                 service_role=None, **kwargs):
        """Bind a boto3 session and behavior flags for this provider."""
        self._outputs = {}
        self.region = region
        self.cloudformation = get_cloudformation_client(session)
        self.interactive = interactive
        # replacements_only is only meaningful in interactive mode.
        self.replacements_only = interactive and replacements_only
        # Interactive mode implies the user can approve re-creation.
        self.recreate_failed = interactive or recreate_failed
        self.service_role = service_role
    def get_stack(self, stack_name, **kwargs):
        """Return the DescribeStacks dict for *stack_name*.

        Raises ``StackDoesNotExist`` when CloudFormation reports the
        stack is gone.
        """
        try:
            return self.cloudformation.describe_stacks(
                StackName=stack_name)['Stacks'][0]
        except botocore.exceptions.ClientError as e:
            if "does not exist" not in str(e):
                raise
            raise exceptions.StackDoesNotExist(stack_name)
    def get_stack_status(self, stack, **kwargs):
        """Return the StackStatus string of *stack*."""
        return stack['StackStatus']
    def is_stack_completed(self, stack, **kwargs):
        """True when the stack is in a successful terminal state."""
        return self.get_stack_status(stack) in self.COMPLETE_STATUSES
    def is_stack_in_progress(self, stack, **kwargs):
        """True when an operation is currently running on the stack."""
        return self.get_stack_status(stack) in self.IN_PROGRESS_STATUSES
    def is_stack_destroyed(self, stack, **kwargs):
        """True when the stack has been deleted."""
        return self.get_stack_status(stack) == self.DELETED_STATUS
    def is_stack_recreatable(self, stack, **kwargs):
        """True when the stack can only be fixed by delete + re-create."""
        return self.get_stack_status(stack) in self.RECREATION_STATUSES
    def is_stack_rolling_back(self, stack, **kwargs):
        """True when the stack is currently rolling back."""
        return self.get_stack_status(stack) in self.ROLLING_BACK_STATUSES
    def is_stack_failed(self, stack, **kwargs):
        """True when the stack is in a failed terminal state."""
        return self.get_stack_status(stack) in self.FAILED_STATUSES
    def tail_stack(self, stack, cancel, retries=0, **kwargs):
        """Tail *stack*'s events, retrying while the stack does not exist yet.

        Retries up to MAX_TAIL_RETRIES times (1s apart) to cover the window
        between issuing a create and the stack becoming describable.
        """
        def log_func(e):
            # Log "[fqn] STATUS TYPE [reason]" with empty fields dropped.
            event_args = [e['ResourceStatus'], e['ResourceType'],
                          e.get('ResourceStatusReason', None)]
            event_args = [arg for arg in event_args if arg]
            template = " ".join(["[%s]"] + ["%s" for _ in event_args])
            logger.info(template, *([stack.fqn] + event_args))
        if not retries:
            logger.info("Tailing stack: %s", stack.fqn)
        try:
            self.tail(stack.fqn,
                      cancel=cancel,
                      log_func=log_func,
                      include_initial=False)
        except botocore.exceptions.ClientError as e:
            if "does not exist" in str(e) and retries < MAX_TAIL_RETRIES:
                time.sleep(1)
                self.tail_stack(stack, cancel, retries=retries + 1, **kwargs)
            else:
                raise
    @staticmethod
    def _tail_print(e):
        # Default event printer used by tail().
        print("%s %s %s" % (e['ResourceStatus'],
                            e['ResourceType'],
                            e['EventId']))
    def get_events(self, stackname):
        """Return all stack events for *stackname*, oldest first.

        Follows NextToken pagination (sleeping 1s between pages) and
        reverses the API's newest-first ordering.
        """
        next_token = None
        event_list = []
        while 1:
            if next_token is not None:
                events = self.cloudformation.describe_stack_events(
                    StackName=stackname, NextToken=next_token
                )
            else:
                events = self.cloudformation.describe_stack_events(
                    StackName=stackname
                )
            event_list.append(events['StackEvents'])
            next_token = events.get('NextToken', None)
            if next_token is None:
                break
            time.sleep(1)
        return reversed(sum(event_list, []))
    def tail(self, stack_name, cancel, log_func=_tail_print, sleep_time=5,
             include_initial=True):
        """Show and then poll new events for *stack_name* until *cancel* fires.

        Already-seen events are tracked by EventId; *cancel* is an
        Event-like object whose ``wait`` both sleeps and signals shutdown.
        """
        seen = set()
        initial_events = self.get_events(stack_name)
        for e in initial_events:
            if include_initial:
                log_func(e)
            seen.add(e['EventId'])
        # Now keep looping through and dump the new events
        while 1:
            events = self.get_events(stack_name)
            for e in events:
                if e['EventId'] not in seen:
                    log_func(e)
                    seen.add(e['EventId'])
            if cancel.wait(sleep_time):
                return
    def destroy_stack(self, stack, **kwargs):
        """Issue a DeleteStack for *stack* (does not wait for completion)."""
        logger.debug("Destroying stack: %s" % (self.get_stack_name(stack)))
        args = {"StackName": self.get_stack_name(stack)}
        if self.service_role:
            args["RoleARN"] = self.service_role
        self.cloudformation.delete_stack(**args)
        return True
    def create_stack(self, fqn, template, parameters, tags,
                     force_change_set=False, stack_policy=None,
                     **kwargs):
        """Create stack *fqn*, directly or via a CREATE change set."""
        logger.debug("Attempting to create stack %s:.", fqn)
        logger.debug("  parameters: %s", parameters)
        logger.debug("  tags: %s", tags)
        if template.url:
            logger.debug("  template_url: %s", template.url)
        else:
            logger.debug("  no template url, uploading template "
                         "directly.")
        if force_change_set:
            logger.debug("force_change_set set to True, creating stack with "
                         "changeset.")
            _changes, change_set_id = create_change_set(
                self.cloudformation, fqn, template, parameters, tags,
                'CREATE', service_role=self.service_role, **kwargs
            )
            self.cloudformation.execute_change_set(
                ChangeSetName=change_set_id,
            )
        else:
            args = generate_cloudformation_args(
                fqn, parameters, tags, template,
                service_role=self.service_role,
                stack_policy=stack_policy,
            )
            try:
                self.cloudformation.create_stack(**args)
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Message'] == ('TemplateURL must '
                                                      'reference a valid S3 '
                                                      'object to which you '
                                                      'have access.'):
                    # NOTE(review): self.service_role is passed positionally
                    # into s3_fallback's change_set_name slot (its 6th
                    # parameter), not service_role — looks like a bug; confirm
                    # against s3_fallback's signature.
                    s3_fallback(fqn, template, parameters, tags,
                                self.cloudformation.create_stack,
                                self.service_role)
                else:
                    raise
    def select_update_method(self, force_interactive, force_change_set):
        """Pick the update strategy based on provider and per-stack flags."""
        if self.interactive or force_interactive:
            return self.interactive_update_stack
        elif force_change_set:
            return self.noninteractive_changeset_update
        else:
            return self.default_update_stack
    def prepare_stack_for_update(self, stack, tags):
        """Decide whether *stack* can be updated, re-creating it if needed.

        Returns True when an update can proceed, False when the stack is
        gone (or was just destroyed for re-creation).  Raises
        ``StackUpdateBadStatus`` for states that cannot be handled.
        """
        if self.is_stack_destroyed(stack):
            return False
        elif self.is_stack_completed(stack):
            return True
        stack_name = self.get_stack_name(stack)
        stack_status = self.get_stack_status(stack)
        if self.is_stack_in_progress(stack):
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Update already in-progress')
        if not self.is_stack_recreatable(stack):
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Unsupported state for re-creation')
        if not self.recreate_failed:
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Stack re-creation is disabled. Run stacker again with the '
                '--recreate-failed option to force it to be deleted and '
                'created from scratch.')
        # Refuse to destroy stacks that don't carry our expected tags —
        # they were probably not created by stacker.
        stack_tags = self.get_stack_tags(stack)
        if not check_tags_contain(stack_tags, tags):
            raise exceptions.StackUpdateBadStatus(
                stack_name, stack_status,
                'Tags differ from current configuration, possibly not created '
                'with stacker')
        if self.interactive:
            sys.stdout.write(
                'The \"%s\" stack is in a failed state (%s).\n'
                'It cannot be updated, but it can be deleted and re-created.\n'
                'All its current resources will IRREVERSIBLY DESTROYED.\n'
                'Proceed carefully!\n\n' % (stack_name, stack_status))
            sys.stdout.flush()
            ask_for_approval(include_verbose=False)
        logger.warn('Destroying stack \"%s\" for re-creation', stack_name)
        self.destroy_stack(stack)
        return False
    def update_stack(self, fqn, template, old_parameters, parameters, tags,
                     force_interactive=False, force_change_set=False,
                     stack_policy=None, **kwargs):
        """Update stack *fqn* using the strategy selected by the flags."""
        logger.debug("Attempting to update stack %s:", fqn)
        logger.debug("  parameters: %s", parameters)
        logger.debug("  tags: %s", tags)
        if template.url:
            logger.debug("  template_url: %s", template.url)
        else:
            logger.debug("  no template url, uploading template directly.")
        update_method = self.select_update_method(force_interactive,
                                                  force_change_set)
        return update_method(fqn, template, old_parameters, parameters, tags,
                             stack_policy=stack_policy, **kwargs)
    def interactive_update_stack(self, fqn, template, old_parameters,
                                 parameters, tags, stack_policy=None,
                                 **kwargs):
        """Update via a change set, showing the diff and asking for approval."""
        logger.debug("Using interactive provider mode for %s.", fqn)
        changes, change_set_id = create_change_set(
            self.cloudformation, fqn, template, parameters, tags,
            'UPDATE', service_role=self.service_role, **kwargs
        )
        params_diff = diff_parameters(
            self.params_as_dict(old_parameters),
            self.params_as_dict(parameters))
        action = "replacements" if self.replacements_only else "changes"
        full_changeset = changes
        if self.replacements_only:
            changes = requires_replacement(changes)
        if changes or params_diff:
            # Serialize prompts so parallel stack updates don't interleave.
            ui.lock()
            try:
                output_summary(fqn, action, changes, params_diff,
                               replacements_only=self.replacements_only)
                ask_for_approval(
                    full_changeset=full_changeset,
                    params_diff=params_diff,
                    include_verbose=True,
                )
            finally:
                ui.unlock()
        # ChangeSets don't support specifying a stack policy inline, so set
        # the policy on the stack separately before executing the change set.
        if stack_policy:
            kwargs = generate_stack_policy_args(stack_policy)
            kwargs["StackName"] = fqn
            self.cloudformation.set_stack_policy(**kwargs)
        self.cloudformation.execute_change_set(
            ChangeSetName=change_set_id,
        )
    def noninteractive_changeset_update(self, fqn, template, old_parameters,
                                        parameters, tags, **kwargs):
        """Update via a change set without prompting (e.g. for transforms)."""
        logger.debug("Using noninterative changeset provider mode "
                     "for %s.", fqn)
        _changes, change_set_id = create_change_set(
            self.cloudformation, fqn, template, parameters, tags,
            'UPDATE', service_role=self.service_role, **kwargs
        )
        self.cloudformation.execute_change_set(
            ChangeSetName=change_set_id,
        )
    def default_update_stack(self, fqn, template, old_parameters, parameters,
                             tags, stack_policy=None, **kwargs):
        """Update via a plain UpdateStack call (no change set)."""
        logger.debug("Using default provider mode for %s.", fqn)
        args = generate_cloudformation_args(
            fqn, parameters, tags, template,
            service_role=self.service_role,
            stack_policy=stack_policy,
        )
        try:
            self.cloudformation.update_stack(**args)
        except botocore.exceptions.ClientError as e:
            if "No updates are to be performed." in str(e):
                logger.debug(
                    "Stack %s did not change, not updating.",
                    fqn,
                )
                raise exceptions.StackDidNotChange
            elif e.response['Error']['Message'] == ('TemplateURL must '
                                                    'reference a valid '
                                                    'S3 object to which '
                                                    'you have access.'):
                # NOTE(review): as in create_stack, self.service_role lands in
                # s3_fallback's change_set_name positional slot — confirm.
                s3_fallback(fqn, template, parameters, tags,
                            self.cloudformation.update_stack,
                            self.service_role)
            else:
                raise
    def get_stack_name(self, stack, **kwargs):
        """Return the name of *stack* (a DescribeStacks stack dict)."""
        return stack['StackName']
    def get_stack_tags(self, stack, **kwargs):
        """Return the Tags list of *stack*."""
        return stack['Tags']
    def get_outputs(self, stack_name, *args, **kwargs):
        """Return the output dict for *stack_name*, memoized per instance."""
        if stack_name not in self._outputs:
            stack = self.get_stack(stack_name)
            self._outputs[stack_name] = get_output_dict(stack)
        return self._outputs[stack_name]
    def get_output_dict(self, stack):
        """Delegate to the module-level get_output_dict helper."""
        return get_output_dict(stack)
    def get_stack_info(self, stack):
        """Get the template and parameters of the stack currently in AWS.

        Returns ``[template_json, parameters_dict]``; raises
        ``StackDoesNotExist`` when the stack is gone.
        """
        stack_name = stack['StackId']
        try:
            template = self.cloudformation.get_template(
                StackName=stack_name)['TemplateBody']
        except botocore.exceptions.ClientError as e:
            if "does not exist" not in str(e):
                raise
            raise exceptions.StackDoesNotExist(stack_name)
        parameters = self.params_as_dict(stack.get('Parameters', []))
        return [json.dumps(template), parameters]
    @staticmethod
    def params_as_dict(parameters_list):
        """Collapse a CloudFormation Parameters list into a key->value dict."""
        parameters = dict()
        for p in parameters_list:
            parameters[p['ParameterKey']] = p['ParameterValue']
        return parameters
| true
| true
|
1c47eda88b7276191251c02fb6fc7111b8dec436
| 1,147
|
py
|
Python
|
wxpython/02-temperature-converter.py
|
kurtmckee/learning
|
fa8dc08ee0d81b71d3ad569a02b390e7dee35d5a
|
[
"MIT"
] | null | null | null |
wxpython/02-temperature-converter.py
|
kurtmckee/learning
|
fa8dc08ee0d81b71d3ad569a02b390e7dee35d5a
|
[
"MIT"
] | null | null | null |
wxpython/02-temperature-converter.py
|
kurtmckee/learning
|
fa8dc08ee0d81b71d3ad569a02b390e7dee35d5a
|
[
"MIT"
] | null | null | null |
import wx
class Converter(wx.Frame):
    """A small Fahrenheit-to-Celsius converter window."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        panel = wx.Panel(self)
        sizer = wx.BoxSizer()
        panel.SetSizer(sizer)
        # Temperature input; pressing Enter converts, same as the button.
        self.input_box = wx.TextCtrl(panel, style=wx.TE_PROCESS_ENTER)
        self.input_box.Bind(wx.EVT_TEXT_ENTER, self.calculate)
        sizer.Add(self.input_box)
        button = wx.Button(panel, label='F --> C')
        button.Bind(wx.EVT_BUTTON, self.calculate)
        sizer.Add(button)
        self.output_label = wx.StaticText(panel, -1, label='--- C')
        sizer.Add(self.output_label)

    def calculate(self, event):
        """Convert the entered Fahrenheit value and update the label.

        Shows 'Invalid' for non-numeric input and 'Impossibly cold!' for
        temperatures below absolute zero.
        """
        try:
            # float() rather than int() so fractional inputs like "98.6"
            # convert instead of being rejected as Invalid.
            f = float(self.input_box.GetValue())
        except ValueError:
            self.output_label.SetLabel('Invalid')
            return
        c = (f - 32) * 5 / 9
        if c < -273.15:
            # Below absolute zero (-459.67 F) is physically impossible.
            self.output_label.SetLabel('Impossibly cold!')
        else:
            self.output_label.SetLabel(f'{c:.1f} C')
# Application entry point: create the frame and hand control to wx's
# main event loop (blocks until the window is closed).
app = wx.App()
frame = Converter(None, title='Temperature Converter')
frame.Show()
app.MainLoop()
| 26.674419
| 71
| 0.558849
|
import wx
class Converter(wx.Frame):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
panel = wx.Panel(self)
sizer = wx.BoxSizer()
panel.SetSizer(sizer)
self.input_box = wx.TextCtrl(panel, style=wx.TE_PROCESS_ENTER)
self.input_box.Bind(wx.EVT_TEXT_ENTER, self.calculate)
sizer.Add(self.input_box)
button = wx.Button(panel, label='F --> C')
button.Bind(wx.EVT_BUTTON, self.calculate)
sizer.Add(button)
self.output_label = wx.StaticText(panel, -1, label='--- C')
sizer.Add(self.output_label)
def calculate(self, event):
try:
f = int(self.input_box.GetValue())
except ValueError:
self.output_label.SetLabel('Invalid')
return
c = (f - 32) * 5 / 9
if c < -273.15:
self.output_label.SetLabel('Impossibly cold!')
else:
self.output_label.SetLabel(f'{c:.1f} C')
app = wx.App()
frame = Converter(None, title='Temperature Converter')
frame.Show()
app.MainLoop()
| true
| true
|
1c47ee58a2b4e8bd47920723aea259e394c0a8c6
| 3,278
|
py
|
Python
|
tests/test_edgeql_datatypes.py
|
mcaramma/edgedb
|
53b18dbaf7407617ca135d1f8a5047bda6414654
|
[
"Apache-2.0"
] | 2
|
2019-12-09T12:52:58.000Z
|
2020-02-20T15:20:22.000Z
|
tests/test_edgeql_datatypes.py
|
1st1/edgedb
|
3e234aede215d4fc517be9397a25bb16e5f1ace3
|
[
"Apache-2.0"
] | null | null | null |
tests/test_edgeql_datatypes.py
|
1st1/edgedb
|
3e234aede215d4fc517be9397a25bb16e5f1ace3
|
[
"Apache-2.0"
] | null | null | null |
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2012-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest # NOQA
from edb.server import _testbase as tb
from edb.client import exceptions as exc
class TestEdgeQLDT(tb.QueryTestCase):
    """Datatype tests: datetime/timedelta arithmetic and sequence scalars."""

    SETUP = '''
        CREATE MIGRATION default::m TO eschema $$
            scalar type seq_t extending sequence
            scalar type seq2_t extending sequence
            type Obj:
                property seq_prop -> seq_t
            type Obj2:
                property seq_prop -> seq2_t
        $$;
        COMMIT MIGRATION default::m;
    '''
    async def test_edgeql_dt_datetime_01(self):
        """datetime +/- timedelta works; timedelta - datetime is rejected."""
        await self.assert_query_result('''
            SELECT <datetime>'2017-10-10' + <timedelta>'1 day';
            SELECT <timedelta>'1 day' + <datetime>'2017-10-10';
            SELECT <datetime>'2017-10-10' - <timedelta>'1 day';
            SELECT <timedelta>'1 day' + <timedelta>'1 day';
            SELECT <timedelta>'4 days' - <timedelta>'1 day';
        ''', [
            ['2017-10-11T00:00:00+00:00'],
            ['2017-10-11T00:00:00+00:00'],
            ['2017-10-09T00:00:00+00:00'],
            ['2 days'],
            ['3 days'],
        ])
        with self.assertRaisesRegex(
                exc.EdgeQLError,
                'operator `-` is not defined for types.*timedelta.*datetime'):
            await self.con.execute("""
                SELECT <timedelta>'1 day' - <datetime>'2017-10-10';
            """)
    async def test_edgeql_dt_datetime_02(self):
        """Casting datetimes (and datetime arithmetic) to str."""
        await self.assert_query_result('''
            SELECT <str><datetime>'2017-10-10';
            SELECT <str>(<datetime>'2017-10-10' - <timedelta>'1 day');
        ''', [
            ['2017-10-10T00:00:00+00:00'],
            ['2017-10-09T00:00:00+00:00'],
        ])
    @unittest.expectedFailure
    async def test_edgeql_dt_datetime_03(self):
        """Datetimes inside tuple casts (known failure)."""
        await self.assert_query_result('''
            SELECT <tuple<str,datetime>>('foo', '2020-10-10');
            SELECT (<tuple<str,datetime>>('foo', '2020-10-10')).1 +
                   <timedelta>'1 month';
        ''', [
            [{'foo': '2020-10-10T00:00:00+00:00'}],
            ['2020-11-10T00:00:00+00:00'],
        ])
    async def test_edgeql_dt_sequence_01(self):
        """Distinct sequence scalars maintain independent counters."""
        await self.assert_query_result('''
            INSERT Obj;
            INSERT Obj;
            INSERT Obj2;
            SELECT Obj { seq_prop } ORDER BY Obj.seq_prop;
            SELECT Obj2 { seq_prop };
        ''', [
            [1],
            [1],
            [1],
            [
                {'seq_prop': 1}, {'seq_prop': 2}
            ],
            [
                {'seq_prop': 1}
            ],
        ])
| 31.825243
| 78
| 0.552776
|
import unittest
from edb.server import _testbase as tb
from edb.client import exceptions as exc
class TestEdgeQLDT(tb.QueryTestCase):
SETUP = '''
CREATE MIGRATION default::m TO eschema $$
scalar type seq_t extending sequence
scalar type seq2_t extending sequence
type Obj:
property seq_prop -> seq_t
type Obj2:
property seq_prop -> seq2_t
$$;
COMMIT MIGRATION default::m;
'''
async def test_edgeql_dt_datetime_01(self):
await self.assert_query_result('''
SELECT <datetime>'2017-10-10' + <timedelta>'1 day';
SELECT <timedelta>'1 day' + <datetime>'2017-10-10';
SELECT <datetime>'2017-10-10' - <timedelta>'1 day';
SELECT <timedelta>'1 day' + <timedelta>'1 day';
SELECT <timedelta>'4 days' - <timedelta>'1 day';
''', [
['2017-10-11T00:00:00+00:00'],
['2017-10-11T00:00:00+00:00'],
['2017-10-09T00:00:00+00:00'],
['2 days'],
['3 days'],
])
with self.assertRaisesRegex(
exc.EdgeQLError,
'operator `-` is not defined for types.*timedelta.*datetime'):
await self.con.execute("""
SELECT <timedelta>'1 day' - <datetime>'2017-10-10';
""")
async def test_edgeql_dt_datetime_02(self):
await self.assert_query_result('''
SELECT <str><datetime>'2017-10-10';
SELECT <str>(<datetime>'2017-10-10' - <timedelta>'1 day');
''', [
['2017-10-10T00:00:00+00:00'],
['2017-10-09T00:00:00+00:00'],
])
@unittest.expectedFailure
async def test_edgeql_dt_datetime_03(self):
await self.assert_query_result('''
SELECT <tuple<str,datetime>>('foo', '2020-10-10');
SELECT (<tuple<str,datetime>>('foo', '2020-10-10')).1 +
<timedelta>'1 month';
''', [
[{'foo': '2020-10-10T00:00:00+00:00'}],
['2020-11-10T00:00:00+00:00'],
])
async def test_edgeql_dt_sequence_01(self):
await self.assert_query_result('''
INSERT Obj;
INSERT Obj;
INSERT Obj2;
SELECT Obj { seq_prop } ORDER BY Obj.seq_prop;
SELECT Obj2 { seq_prop };
''', [
[1],
[1],
[1],
[
{'seq_prop': 1}, {'seq_prop': 2}
],
[
{'seq_prop': 1}
],
])
| true
| true
|
1c47ee6d294ce288e1a41b88f9ca63742633f99f
| 37,265
|
py
|
Python
|
tests/testsuite.py
|
felixxm/unittest-xml-reporting
|
0ef90a6f2565430c4e8c19b4b4741a971a8b4041
|
[
"BSD-2-Clause-FreeBSD"
] | 212
|
2015-01-08T13:32:40.000Z
|
2022-03-31T21:32:23.000Z
|
tests/testsuite.py
|
felixxm/unittest-xml-reporting
|
0ef90a6f2565430c4e8c19b4b4741a971a8b4041
|
[
"BSD-2-Clause-FreeBSD"
] | 169
|
2015-01-22T20:50:17.000Z
|
2022-03-23T06:23:08.000Z
|
tests/testsuite.py
|
felixxm/unittest-xml-reporting
|
0ef90a6f2565430c4e8c19b4b4741a971a8b4041
|
[
"BSD-2-Clause-FreeBSD"
] | 104
|
2015-01-16T19:50:36.000Z
|
2022-03-18T20:49:16.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Executable module to test unittest-xml-reporting.
"""
from __future__ import print_function
import contextlib
import io
import sys
from xmlrunner.unittest import unittest
import xmlrunner
from xmlrunner.result import _DuplicateWriter
from xmlrunner.result import _XMLTestResult
from xmlrunner.result import resolve_filename
import doctest
import tests.doctest_example
from io import StringIO, BytesIO
from tempfile import mkdtemp
from tempfile import mkstemp
from shutil import rmtree
from glob import glob
from xml.dom import minidom
from lxml import etree
import os
import os.path
from unittest import mock
def _load_schema(version):
    """Load and compile the vendored Jenkins JUnit XSD for *version*.

    Any I/O or parse failure propagates to the caller.
    """
    path = os.path.join(
        os.path.dirname(__file__),
        'vendor/jenkins/xunit-plugin', version, 'junit-10.xsd')
    with open(path, 'r') as schema_file:
        schema_doc = etree.parse(schema_file)
        # The unconditional `raise RuntimeError` that used to follow this
        # return was unreachable dead code and has been removed; failures
        # surface as OSError / XMLSyntaxError from the calls above.
        return etree.XMLSchema(schema_doc)
def validate_junit_report(version, text):
    """Assert that *text* (XML bytes) validates against the vendored
    JUnit schema for *version*."""
    document = etree.parse(BytesIO(text))
    _load_schema(version).assertValid(document)
class DoctestTest(unittest.TestCase):
    """Check that doctest-based suites produce sensible XML reports."""

    def test_doctest_example(self):
        """Run tests/doctest_example through XMLTestRunner and inspect XML."""
        suite = doctest.DocTestSuite(tests.doctest_example)
        outdir = BytesIO()
        stream = StringIO()
        runner = xmlrunner.XMLTestRunner(
            stream=stream, output=outdir, verbosity=0)
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        # Both method-level and module-level doctests must be reported
        # with their qualified classnames.
        self.assertIn('classname="tests.doctest_example.Multiplicator"'.encode('utf8'), output)
        self.assertIn('name="threetimes"'.encode('utf8'), output)
        self.assertIn('classname="tests.doctest_example"'.encode('utf8'), output)
        self.assertIn('name="twice"'.encode('utf8'), output)
@contextlib.contextmanager
def capture_stdout_stderr():
    """Temporarily swap sys.stdout/sys.stderr for StringIO buffers.

    Yields the pair ``(stdout_buffer, stderr_buffer)`` and always restores
    the original streams on exit.
    """
    saved_out, saved_err = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = StringIO(), StringIO()
    try:
        yield (sys.stdout, sys.stderr)
    finally:
        sys.stdout, sys.stderr = saved_out, saved_err
def _strip_xml(xml, changes):
    """Drop non-whitelisted attributes from an XML document.

    *changes* maps an XPath expression to the tuple of attribute names to
    KEEP on every matching node; all other attributes are removed.  Returns
    the re-serialized document bytes.
    """
    doc = etree.fromstring(xml)
    for xpath, attributes in changes.items():
        for node in doc.xpath(xpath):
            for attrib in node.attrib.keys():
                if attrib not in attributes:
                    del node.attrib[attrib]
    return etree.tostring(doc)
def some_decorator(f):
    """Wrap *f* in an exec-generated wrapper (regression helper, issue #195).

    The wrapper is built from source via exec on purpose, so it carries no
    functools metadata pointing back at *f*.
    """
    source = """\
def wrapper(*args, **kwargs):
    return func(*args, **kwargs)
"""
    namespace = dict(func=f)
    exec(source, namespace)
    return namespace['wrapper']
class XMLTestRunnerTestCase(unittest.TestCase):
"""
XMLTestRunner test case.
"""
class DummyTest(unittest.TestCase):
@unittest.skip("demonstrating skipping")
def test_skip(self):
pass # pragma: no cover
@unittest.skip(u"demonstrating non-ascii skipping: éçà")
def test_non_ascii_skip(self):
pass # pragma: no cover
def test_pass(self):
pass
def test_fail(self):
self.assertTrue(False)
@unittest.expectedFailure
def test_expected_failure(self):
self.assertTrue(False)
@unittest.expectedFailure
def test_unexpected_success(self):
pass
def test_error(self):
1 / 0
def test_cdata_section(self):
print('<![CDATA[content]]>')
def test_invalid_xml_chars_in_doc(self):
"""
Testing comments, -- is not allowed, or invalid xml 1.0 chars such as \x0c
"""
pass
def test_non_ascii_error(self):
self.assertEqual(u"éçà", 42)
def test_unsafe_unicode(self):
print(u"A\x00B\x08C\x0BD\x0C")
def test_output_stdout_and_stderr(self):
print('test on stdout')
print('test on stderr', file=sys.stderr)
def test_runner_buffer_output_pass(self):
print('should not be printed')
def test_runner_buffer_output_fail(self):
print('should be printed')
self.fail('expected to fail')
def test_output(self):
print('test message')
def test_non_ascii_runner_buffer_output_fail(self):
print(u'Where is the café ?')
self.fail(u'The café could not be found')
class DummySubTest(unittest.TestCase):
def test_subTest_pass(self):
for i in range(2):
with self.subTest(i=i):
pass
def test_subTest_fail(self):
for i in range(2):
with self.subTest(i=i):
self.fail('this is a subtest.')
def test_subTest_error(self):
for i in range(2):
with self.subTest(i=i):
raise Exception('this is a subtest')
def test_subTest_mixed(self):
for i in range(2):
with self.subTest(i=i):
self.assertLess(i, 1, msg='this is a subtest.')
def test_subTest_with_dots(self):
for i in range(2):
with self.subTest(module='hello.world.subTest{}'.format(i)):
self.fail('this is a subtest.')
class DecoratedUnitTest(unittest.TestCase):
@some_decorator
def test_pass(self):
pass
class DummyErrorInCallTest(unittest.TestCase):
def __call__(self, result):
try:
raise Exception('Massive fail')
except Exception:
result.addError(self, sys.exc_info())
return
def test_pass(self):
# it is expected not to be called.
pass # pragma: no cover
class DummyRefCountTest(unittest.TestCase):
class dummy(object):
pass
def test_fail(self):
inst = self.dummy()
self.assertTrue(False)
def setUp(self):
self.stream = StringIO()
self.outdir = mkdtemp()
self.verbosity = 0
self.runner_kwargs = {}
self.addCleanup(rmtree, self.outdir)
def _test_xmlrunner(self, suite, runner=None, outdir=None):
if outdir is None:
outdir = self.outdir
stream = self.stream
verbosity = self.verbosity
runner_kwargs = self.runner_kwargs
if runner is None:
runner = xmlrunner.XMLTestRunner(
stream=stream, output=outdir, verbosity=verbosity,
**runner_kwargs)
if isinstance(outdir, BytesIO):
self.assertFalse(outdir.getvalue())
else:
self.assertEqual(0, len(glob(os.path.join(outdir, '*xml'))))
runner.run(suite)
if isinstance(outdir, BytesIO):
self.assertTrue(outdir.getvalue())
else:
self.assertEqual(1, len(glob(os.path.join(outdir, '*xml'))))
return runner
def test_basic_unittest_constructs(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummyTest('test_skip'))
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_expected_failure'))
suite.addTest(self.DummyTest('test_unexpected_success'))
suite.addTest(self.DummyTest('test_error'))
self._test_xmlrunner(suite)
def test_classnames(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummySubTest('test_subTest_pass'))
outdir = BytesIO()
stream = StringIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=outdir, verbosity=0)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertRegexpMatches(
output,
r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
r'DummyTest" name="test_pass"'.encode('utf8'),
)
self.assertRegexpMatches(
output,
r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
r'DummySubTest" name="test_subTest_pass"'.encode('utf8'),
)
def test_expected_failure(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_expected_failure'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
self.assertNotIn(b'<failure', outdir.getvalue())
self.assertNotIn(b'<error', outdir.getvalue())
self.assertIn(b'<skip', outdir.getvalue())
def test_unexpected_success(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_unexpected_success'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
self.assertNotIn(b'<failure', outdir.getvalue())
self.assertIn(b'<error', outdir.getvalue())
self.assertNotIn(b'<skip', outdir.getvalue())
def test_xmlrunner_non_ascii(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_non_ascii_skip'))
suite.addTest(self.DummyTest('test_non_ascii_error'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn(
u'message="demonstrating non-ascii skipping: éçà"'.encode('utf8'),
output)
def test_xmlrunner_safe_xml_encoding_name(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
firstline = output.splitlines()[0]
# test for issue #74
self.assertIn('encoding="UTF-8"'.encode('utf8'), firstline)
def test_xmlrunner_check_for_valid_xml_streamout(self):
"""
This test checks if the xml document is valid if there are more than
one testsuite and the output of the report is a single stream.
"""
class DummyTestA(unittest.TestCase):
def test_pass(self):
pass
class DummyTestB(unittest.TestCase):
def test_pass(self):
pass
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestA))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestB))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
# Finally check if we have a valid XML document or not.
try:
minidom.parseString(output)
except Exception as e: # pragma: no cover
# note: we could remove the try/except, but it's more crude.
self.fail(e)
def test_xmlrunner_unsafe_unicode(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_unsafe_unicode'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn(u"<![CDATA[ABCD\n]]>".encode('utf8'),
output)
def test_xmlrunner_non_ascii_failures(self):
self._xmlrunner_non_ascii_failures()
def test_xmlrunner_non_ascii_failures_buffered_output(self):
self._xmlrunner_non_ascii_failures(buffer=True)
def _xmlrunner_non_ascii_failures(self, buffer=False):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest(
'test_non_ascii_runner_buffer_output_fail'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
buffer=buffer, **self.runner_kwargs)
# allow output non-ascii letters to stdout
orig_stdout = sys.stdout
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
try:
runner.run(suite)
finally:
# Not to be closed when TextIOWrapper is disposed.
sys.stdout.detach()
sys.stdout = orig_stdout
outdir.seek(0)
output = outdir.read()
self.assertIn(
u'Where is the café ?'.encode('utf8'),
output)
self.assertIn(
u'The café could not be found'.encode('utf8'),
output)
@unittest.expectedFailure
def test_xmlrunner_buffer_output_pass(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_runner_buffer_output_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
# Since we are always buffering stdout/stderr
# it is currently troublesome to print anything at all
# and be consistent with --buffer option (issue #59)
self.assertIn('should not be printed', testsuite_output)
# this will be fixed when using the composite approach
# that was under development in the rewrite branch.
def test_xmlrunner_buffer_output_fail(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_runner_buffer_output_fail'))
# --buffer option
self.runner_kwargs['buffer'] = True
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertIn('should be printed', testsuite_output)
def test_xmlrunner_output_without_buffer(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_output'))
with capture_stdout_stderr() as r:
self._test_xmlrunner(suite)
output_from_test = r[0].getvalue()
self.assertIn('test message', output_from_test)
def test_xmlrunner_output_with_buffer(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_output'))
# --buffer option
self.runner_kwargs['buffer'] = True
with capture_stdout_stderr() as r:
self._test_xmlrunner(suite)
output_from_test = r[0].getvalue()
self.assertNotIn('test message', output_from_test)
def test_xmlrunner_stdout_stderr_recovered_without_buffer(self):
orig_stdout = sys.stdout
orig_stderr = sys.stderr
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
self.assertIs(orig_stdout, sys.stdout)
self.assertIs(orig_stderr, sys.stderr)
def test_xmlrunner_stdout_stderr_recovered_with_buffer(self):
orig_stdout = sys.stdout
orig_stderr = sys.stderr
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
# --buffer option
self.runner_kwargs['buffer'] = True
self._test_xmlrunner(suite)
self.assertIs(orig_stdout, sys.stdout)
self.assertIs(orig_stderr, sys.stderr)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_fail(self):
# test for issue #77
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_fail'))
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_fail \(i=0\)"')
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_fail \(i=1\)"')
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_error(self):
# test for issue #155
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_error'))
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_error \(i=0\)"')
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_error \(i=1\)"')
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_mixed(self):
# test for issue #155
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_mixed'))
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertNotIn(
'name="test_subTest_mixed (i=0)"'.encode('utf8'),
output)
self.assertIn(
'name="test_subTest_mixed (i=1)"'.encode('utf8'),
output)
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_pass(self):
# Test for issue #85
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_pass'))
self._test_xmlrunner(suite)
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_with_dots(self):
# Test for issue #85
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_with_dots'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
xmlcontent = outdir.getvalue().decode()
# Method name
self.assertNotIn('name="subTest', xmlcontent, 'parsing of test method name is not done correctly')
self.assertIn('name="test_subTest_with_dots (module=\'hello.world.subTest', xmlcontent)
# Class name
matchString = 'classname="tests.testsuite.XMLTestRunnerTestCase.DummySubTest.test_subTest_with_dots (module=\'hello.world"'
self.assertNotIn(matchString, xmlcontent, 'parsing of class name is not done correctly')
self.assertIn('classname="tests.testsuite.XMLTestRunnerTestCase.DummySubTest"', xmlcontent)
def test_xmlrunner_pass(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_failfast(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir,
verbosity=self.verbosity, failfast=True,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn('test_fail'.encode('utf8'), output)
self.assertNotIn('test_pass'.encode('utf8'), output)
def test_xmlrunner_verbose(self):
self.verbosity = 1
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_showall(self):
self.verbosity = 2
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_cdata_section(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_cdata_section'))
self._test_xmlrunner(suite)
def test_xmlrunner_invalid_xml_chars_in_doc(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_invalid_xml_chars_in_doc'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
# Finally check if we have a valid XML document or not.
try:
minidom.parseString(output)
except Exception as e: # pragma: no cover
# note: we could remove the try/except, but it's more crude.
self.fail(e)
def test_xmlrunner_outsuffix(self):
self.runner_kwargs['outsuffix'] = '.somesuffix'
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
xmlfile = glob(os.path.join(self.outdir, '*xml'))[0]
assert xmlfile.endswith('.somesuffix.xml')
def test_xmlrunner_nosuffix(self):
self.runner_kwargs['outsuffix'] = ''
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
xmlfile = glob(os.path.join(self.outdir, '*xml'))[0]
xmlfile = os.path.basename(xmlfile)
assert xmlfile.endswith('DummyTest.xml')
def test_junitxml_properties(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.properties = dict(key='value')
self._test_xmlrunner(suite)
def test_junitxml_xsd_validation_order(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummyTest('test_output_stdout_and_stderr'))
suite.properties = dict(key='value')
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
# poor man's schema validation; see issue #90
i_properties = output.index('<properties>'.encode('utf8'))
i_system_out = output.index('<system-out>'.encode('utf8'))
i_system_err = output.index('<system-err>'.encode('utf8'))
i_testcase = output.index('<testcase'.encode('utf8'))
self.assertTrue(i_properties < i_testcase <
i_system_out < i_system_err)
# XSD validation - for good measure.
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
def test_junitxml_xsd_validation_empty_properties(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.properties = None
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertNotIn('<properties>'.encode('utf8'), output)
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
@unittest.skipIf(hasattr(sys, 'pypy_version_info'),
'skip - PyPy + lxml seems to be hanging')
def test_xunit_plugin_transform(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.properties = None
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
with self.assertRaises(etree.DocumentInvalid):
validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', output)
from xmlrunner.extra.xunit_plugin import transform
transformed = transform(output)
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', transformed)
validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', transformed)
self.assertIn('test_pass'.encode('utf8'), transformed)
self.assertIn('test_fail'.encode('utf8'), transformed)
def test_xmlrunner_elapsed_times(self):
self.runner_kwargs['elapsed_times'] = False
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_resultclass(self):
class Result(_XMLTestResult):
pass
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self.runner_kwargs['resultclass'] = Result
self._test_xmlrunner(suite)
def test_xmlrunner_stream(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_stream_empty_testsuite(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
runner.run(suite)
def test_xmlrunner_output_subdir(self):
stream = self.stream
output = os.path.join(self.outdir, 'subdir')
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_patched_stdout(self):
old_stdout, old_stderr = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = StringIO(), StringIO()
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.properties = dict(key='value')
self._test_xmlrunner(suite)
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
def test_opaque_decorator(self):
suite = unittest.TestSuite()
suite.addTest(self.DecoratedUnitTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertNotIn('IOError:', testsuite_output)
def test_xmlrunner_error_in_call(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyErrorInCallTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertIn('Exception: Massive fail', testsuite_output)
    @unittest.skipIf(not hasattr(sys, 'getrefcount'),
                     'skip - PyPy does not have sys.getrefcount.')
    @unittest.skipIf((3, 0) <= sys.version_info < (3, 4),
                     'skip - test not garbage collected. '
                     'https://bugs.python.org/issue11798.')
    def test_xmlrunner_hold_traceback(self):
        # The runner must not keep failure tracebacks alive: the refcount of
        # the fixture's `dummy` class must be unchanged after the run.
        suite = unittest.TestSuite()
        suite.addTest(self.DummyRefCountTest('test_fail'))
        countBeforeTest = sys.getrefcount(self.DummyRefCountTest.dummy)
        runner = self._test_xmlrunner(suite)
        countAfterTest = sys.getrefcount(self.DummyRefCountTest.dummy)
        self.assertEqual(countBeforeTest, countAfterTest)
    class StderrXMLTestRunner(xmlrunner.XMLTestRunner):
        """
        XMLTestRunner that outputs to a sys.stderr that might be replaced.
        Though XMLTestRunner defaults to sys.stderr as its stream, that
        default cannot be swapped out (e.g. by capture_stdout_stderr()),
        because it is resolved when the class is defined.
        This class resolves sys.stderr lazily, at construction time, so
        output goes to the replaced sys.stderr.
        """
        def __init__(self, **kwargs):
            # Look up sys.stderr here, after any patching has taken place.
            super(XMLTestRunnerTestCase.StderrXMLTestRunner, self).__init__(
                stream=sys.stderr,
                **kwargs
            )
def test_test_program_succeed_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertNotIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_succeed_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_fail_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertNotIn('should be printed', r[0].getvalue())
self.assertIn('should be printed', r[1].getvalue())
def test_test_program_fail_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertIn('should be printed', r[0].getvalue())
self.assertNotIn('should be printed', r[1].getvalue())
    def test_partialmethod(self):
        # Regression test: the runner must cope with test methods created via
        # functools.partialmethod (which lack normal function attributes).
        from functools import partialmethod
        def test_partialmethod(test):
            pass
        class TestWithPartialmethod(unittest.TestCase):
            pass
        setattr(
            TestWithPartialmethod,
            'test_partialmethod',
            partialmethod(test_partialmethod),
        )
        suite = unittest.TestSuite()
        suite.addTest(TestWithPartialmethod('test_partialmethod'))
        self._test_xmlrunner(suite)
class DuplicateWriterTestCase(unittest.TestCase):
    """Tests for _DuplicateWriter: every write must land in both sinks."""
    def setUp(self):
        # First sink is a real temp file, second is an in-memory buffer.
        fd, self.file = mkstemp()
        self.fh = os.fdopen(fd, 'w')
        self.buffer = StringIO()
        self.writer = _DuplicateWriter(self.fh, self.buffer)
    def tearDown(self):
        self.buffer.close()
        self.fh.close()
        os.unlink(self.file)
    def getFirstContent(self):
        # Contents of the file sink, as flushed to disk.
        with open(self.file, 'r') as f:
            return f.read()
    def getSecondContent(self):
        # Contents of the in-memory sink.
        return self.buffer.getvalue()
    def test_flush(self):
        self.writer.write('foobarbaz')
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())
    def test_writable(self):
        self.assertTrue(self.writer.writable())
    def test_writelines(self):
        self.writer.writelines([
            'foo\n',
            'bar\n',
            'baz\n',
        ])
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())
    def test_write(self):
        # try long buffer (1M)
        buffer = 'x' * (1024 * 1024)
        wrote = self.writer.write(buffer)
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())
        # write() must report the number of characters actually written.
        self.assertEqual(wrote, len(self.getSecondContent()))
class XMLProgramTestCase(unittest.TestCase):
    """Command-line behaviour of XMLTestProgram: argument forwarding."""
    @mock.patch('sys.argv', ['xmlrunner', '-o', 'flaf'])
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output(self, exiter, testrunner):
        # -o/--output must be forwarded to the runner as `output`.
        xmlrunner.runner.XMLTestProgram()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output='flaf',
        )
        if sys.version_info[:2] > (3, 4):
            # tb_locals only exists on Python 3.5+.
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)
    @mock.patch('sys.argv', ['xmlrunner', '--output-file', 'test.xml'])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output_file(self, exiter, testrunner, opener):
        # --output-file must open the file in binary mode, pass the handle
        # through as `output`, and close it when done.
        xmlrunner.runner.XMLTestProgram()
        opener.assert_called_once_with('test.xml', 'wb')
        open_file = opener()
        open_file.close.assert_called_with()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output=open_file,
        )
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)
    @mock.patch('sys.argv', ['xmlrunner', '--outsuffix', ''])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_outsuffix(self, exiter, testrunner, opener):
        # --outsuffix (here: empty) must be forwarded as `outsuffix`.
        xmlrunner.runner.XMLTestProgram()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            outsuffix='',
        )
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)
class ResolveFilenameTestCase(unittest.TestCase):
    """Tests for resolve_filename's relative/absolute path handling."""
    @mock.patch('os.path.relpath')
    def test_resolve_filename_relative(self, relpath):
        # Paths inside the working tree are reported relative.
        relpath.return_value = 'somefile.py'
        filename = resolve_filename('/path/to/somefile.py')
        self.assertEqual(filename, 'somefile.py')
    @mock.patch('os.path.relpath')
    def test_resolve_filename_outside(self, relpath):
        # Paths that escape the working tree keep their absolute form.
        relpath.return_value = '../../../tmp/somefile.py'
        filename = resolve_filename('/tmp/somefile.py')
        self.assertEqual(filename, '/tmp/somefile.py')
    @mock.patch('os.path.relpath')
    def test_resolve_filename_error(self, relpath):
        # On Windows, relpath raises ValueError across drives; the original
        # path must then be returned unchanged.
        relpath.side_effect = ValueError("ValueError: path is on mount 'C:', start on mount 'D:'")
        filename = resolve_filename('C:\\path\\to\\somefile.py')
        self.assertEqual(filename, 'C:\\path\\to\\somefile.py')
| 36.109496
| 131
| 0.620555
|
from __future__ import print_function
import contextlib
import io
import sys
from xmlrunner.unittest import unittest
import xmlrunner
from xmlrunner.result import _DuplicateWriter
from xmlrunner.result import _XMLTestResult
from xmlrunner.result import resolve_filename
import doctest
import tests.doctest_example
from io import StringIO, BytesIO
from tempfile import mkdtemp
from tempfile import mkstemp
from shutil import rmtree
from glob import glob
from xml.dom import minidom
from lxml import etree
import os
import os.path
from unittest import mock
def _load_schema(version):
    """Load the vendored Jenkins xunit-plugin JUnit XSD for *version*.

    Returns an ``etree.XMLSchema`` ready for validation.  Raises ``OSError``
    if the schema file is missing.  (The original trailing ``raise`` after
    the ``return`` inside ``with`` was unreachable and has been removed.)
    """
    path = os.path.join(
        os.path.dirname(__file__),
        'vendor/jenkins/xunit-plugin', version, 'junit-10.xsd')
    with open(path, 'r') as schema_file:
        schema_doc = etree.parse(schema_file)
        return etree.XMLSchema(schema_doc)
def validate_junit_report(version, text):
    """Validate *text* (report bytes) against the vendored JUnit XSD *version*.

    Raises lxml's DocumentInvalid if the report does not conform.
    """
    document = etree.parse(BytesIO(text))
    schema = _load_schema(version)
    schema.assertValid(document)
class DoctestTest(unittest.TestCase):
    """Doctest-based suites must get sensible classname/name attributes."""
    def test_doctest_example(self):
        suite = doctest.DocTestSuite(tests.doctest_example)
        outdir = BytesIO()
        stream = StringIO()
        runner = xmlrunner.XMLTestRunner(
            stream=stream, output=outdir, verbosity=0)
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        # Method doctests are attributed to the defining class...
        self.assertIn('classname="tests.doctest_example.Multiplicator"'.encode('utf8'), output)
        self.assertIn('name="threetimes"'.encode('utf8'), output)
        # ...and module-level doctests to the module itself.
        self.assertIn('classname="tests.doctest_example"'.encode('utf8'), output)
        self.assertIn('name="twice"'.encode('utf8'), output)
@contextlib.contextmanager
def capture_stdout_stderr():
    """Temporarily replace sys.stdout/sys.stderr with StringIO buffers.

    Yields the pair ``(stdout_buffer, stderr_buffer)`` and restores the
    original streams on exit, even if the body raises.
    """
    saved = (sys.stdout, sys.stderr)
    captured = (StringIO(), StringIO())
    sys.stdout, sys.stderr = captured
    try:
        yield captured
    finally:
        sys.stdout, sys.stderr = saved
def _strip_xml(xml, changes):
    """Return *xml* with only whitelisted attributes kept.

    *changes* maps an XPath expression to the tuple of attribute names to
    preserve on every matching node; all other attributes are removed so
    tests can match against stable output.
    """
    doc = etree.fromstring(xml)
    for xpath, keep in changes.items():
        for node in doc.xpath(xpath):
            doomed = [name for name in node.attrib.keys() if name not in keep]
            for name in doomed:
                del node.attrib[name]
    return etree.tostring(doc)
def some_decorator(f):
    # Deliberately builds the wrapper via exec so the wrapper has no source
    # file associated with it (mimics exec-based decorator libraries);
    # functools.wraps is intentionally NOT used.  Used as an opaque
    # decorator fixture by the test suite.
    code = """\
def wrapper(*args, **kwargs):
    return func(*args, **kwargs)
"""
    evaldict = dict(func=f)
    exec(code, evaldict)
    return evaldict['wrapper']
class XMLTestRunnerTestCase(unittest.TestCase):
class DummyTest(unittest.TestCase):
@unittest.skip("demonstrating skipping")
def test_skip(self):
pass
@unittest.skip(u"demonstrating non-ascii skipping: éçà")
def test_non_ascii_skip(self):
pass
def test_pass(self):
pass
def test_fail(self):
self.assertTrue(False)
@unittest.expectedFailure
def test_expected_failure(self):
self.assertTrue(False)
@unittest.expectedFailure
def test_unexpected_success(self):
pass
def test_error(self):
1 / 0
def test_cdata_section(self):
print('<![CDATA[content]]>')
def test_invalid_xml_chars_in_doc(self):
pass
def test_non_ascii_error(self):
self.assertEqual(u"éçà", 42)
def test_unsafe_unicode(self):
print(u"A\x00B\x08C\x0BD\x0C")
def test_output_stdout_and_stderr(self):
print('test on stdout')
print('test on stderr', file=sys.stderr)
def test_runner_buffer_output_pass(self):
print('should not be printed')
def test_runner_buffer_output_fail(self):
print('should be printed')
self.fail('expected to fail')
def test_output(self):
print('test message')
def test_non_ascii_runner_buffer_output_fail(self):
print(u'Where is the café ?')
self.fail(u'The café could not be found')
class DummySubTest(unittest.TestCase):
def test_subTest_pass(self):
for i in range(2):
with self.subTest(i=i):
pass
def test_subTest_fail(self):
for i in range(2):
with self.subTest(i=i):
self.fail('this is a subtest.')
def test_subTest_error(self):
for i in range(2):
with self.subTest(i=i):
raise Exception('this is a subtest')
def test_subTest_mixed(self):
for i in range(2):
with self.subTest(i=i):
self.assertLess(i, 1, msg='this is a subtest.')
def test_subTest_with_dots(self):
for i in range(2):
with self.subTest(module='hello.world.subTest{}'.format(i)):
self.fail('this is a subtest.')
class DecoratedUnitTest(unittest.TestCase):
@some_decorator
def test_pass(self):
pass
class DummyErrorInCallTest(unittest.TestCase):
def __call__(self, result):
try:
raise Exception('Massive fail')
except Exception:
result.addError(self, sys.exc_info())
return
def test_pass(self):
pass
class DummyRefCountTest(unittest.TestCase):
class dummy(object):
pass
def test_fail(self):
inst = self.dummy()
self.assertTrue(False)
def setUp(self):
self.stream = StringIO()
self.outdir = mkdtemp()
self.verbosity = 0
self.runner_kwargs = {}
self.addCleanup(rmtree, self.outdir)
def _test_xmlrunner(self, suite, runner=None, outdir=None):
if outdir is None:
outdir = self.outdir
stream = self.stream
verbosity = self.verbosity
runner_kwargs = self.runner_kwargs
if runner is None:
runner = xmlrunner.XMLTestRunner(
stream=stream, output=outdir, verbosity=verbosity,
**runner_kwargs)
if isinstance(outdir, BytesIO):
self.assertFalse(outdir.getvalue())
else:
self.assertEqual(0, len(glob(os.path.join(outdir, '*xml'))))
runner.run(suite)
if isinstance(outdir, BytesIO):
self.assertTrue(outdir.getvalue())
else:
self.assertEqual(1, len(glob(os.path.join(outdir, '*xml'))))
return runner
def test_basic_unittest_constructs(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummyTest('test_skip'))
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_expected_failure'))
suite.addTest(self.DummyTest('test_unexpected_success'))
suite.addTest(self.DummyTest('test_error'))
self._test_xmlrunner(suite)
def test_classnames(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummySubTest('test_subTest_pass'))
outdir = BytesIO()
stream = StringIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=outdir, verbosity=0)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertRegexpMatches(
output,
r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
r'DummyTest" name="test_pass"'.encode('utf8'),
)
self.assertRegexpMatches(
output,
r'classname="tests\.testsuite\.(XMLTestRunnerTestCase\.)?'
r'DummySubTest" name="test_subTest_pass"'.encode('utf8'),
)
def test_expected_failure(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_expected_failure'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
self.assertNotIn(b'<failure', outdir.getvalue())
self.assertNotIn(b'<error', outdir.getvalue())
self.assertIn(b'<skip', outdir.getvalue())
def test_unexpected_success(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_unexpected_success'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
self.assertNotIn(b'<failure', outdir.getvalue())
self.assertIn(b'<error', outdir.getvalue())
self.assertNotIn(b'<skip', outdir.getvalue())
def test_xmlrunner_non_ascii(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_non_ascii_skip'))
suite.addTest(self.DummyTest('test_non_ascii_error'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn(
u'message="demonstrating non-ascii skipping: éçà"'.encode('utf8'),
output)
def test_xmlrunner_safe_xml_encoding_name(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
firstline = output.splitlines()[0]
self.assertIn('encoding="UTF-8"'.encode('utf8'), firstline)
def test_xmlrunner_check_for_valid_xml_streamout(self):
class DummyTestA(unittest.TestCase):
def test_pass(self):
pass
class DummyTestB(unittest.TestCase):
def test_pass(self):
pass
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestA))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DummyTestB))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
try:
minidom.parseString(output)
except Exception as e:
self.fail(e)
def test_xmlrunner_unsafe_unicode(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_unsafe_unicode'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn(u"<![CDATA[ABCD\n]]>".encode('utf8'),
output)
def test_xmlrunner_non_ascii_failures(self):
self._xmlrunner_non_ascii_failures()
def test_xmlrunner_non_ascii_failures_buffered_output(self):
self._xmlrunner_non_ascii_failures(buffer=True)
def _xmlrunner_non_ascii_failures(self, buffer=False):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest(
'test_non_ascii_runner_buffer_output_fail'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
buffer=buffer, **self.runner_kwargs)
# allow output non-ascii letters to stdout
orig_stdout = sys.stdout
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
try:
runner.run(suite)
finally:
# Not to be closed when TextIOWrapper is disposed.
sys.stdout.detach()
sys.stdout = orig_stdout
outdir.seek(0)
output = outdir.read()
self.assertIn(
u'Where is the café ?'.encode('utf8'),
output)
self.assertIn(
u'The café could not be found'.encode('utf8'),
output)
@unittest.expectedFailure
def test_xmlrunner_buffer_output_pass(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_runner_buffer_output_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
# Since we are always buffering stdout/stderr
# it is currently troublesome to print anything at all
# and be consistent with --buffer option (issue #59)
self.assertIn('should not be printed', testsuite_output)
# this will be fixed when using the composite approach
# that was under development in the rewrite branch.
def test_xmlrunner_buffer_output_fail(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_runner_buffer_output_fail'))
# --buffer option
self.runner_kwargs['buffer'] = True
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertIn('should be printed', testsuite_output)
def test_xmlrunner_output_without_buffer(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_output'))
with capture_stdout_stderr() as r:
self._test_xmlrunner(suite)
output_from_test = r[0].getvalue()
self.assertIn('test message', output_from_test)
def test_xmlrunner_output_with_buffer(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_output'))
# --buffer option
self.runner_kwargs['buffer'] = True
with capture_stdout_stderr() as r:
self._test_xmlrunner(suite)
output_from_test = r[0].getvalue()
self.assertNotIn('test message', output_from_test)
def test_xmlrunner_stdout_stderr_recovered_without_buffer(self):
orig_stdout = sys.stdout
orig_stderr = sys.stderr
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
self.assertIs(orig_stdout, sys.stdout)
self.assertIs(orig_stderr, sys.stderr)
def test_xmlrunner_stdout_stderr_recovered_with_buffer(self):
orig_stdout = sys.stdout
orig_stderr = sys.stderr
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
# --buffer option
self.runner_kwargs['buffer'] = True
self._test_xmlrunner(suite)
self.assertIs(orig_stdout, sys.stdout)
self.assertIs(orig_stderr, sys.stderr)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
    @unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
                     'unittest.TestCase.subTest not present.')
    def test_unittest_subTest_fail(self):
        """Each failing subTest must appear as its own <testcase> element,
        with the subTest parameters embedded in the name attribute."""
        # test for issue #77
        outdir = BytesIO()
        runner = xmlrunner.XMLTestRunner(
            stream=self.stream, output=outdir, verbosity=self.verbosity,
            **self.runner_kwargs)
        suite = unittest.TestSuite()
        suite.addTest(self.DummySubTest('test_subTest_fail'))
        runner.run(suite)
        outdir.seek(0)
        output = outdir.read()
        # Normalize volatile attributes (timings etc.) before matching.
        output = _strip_xml(output, {
            '//testsuite': (),
            '//testcase': ('classname', 'name'),
            '//failure': ('message',),
        })
        # One <testcase> per subTest iteration (i=0 and i=1).
        self.assertRegexpMatches(
            output,
            br'<testcase classname="tests\.testsuite\.'
            br'(XMLTestRunnerTestCase\.)?DummySubTest" '
            br'name="test_subTest_fail \(i=0\)"')
        self.assertRegexpMatches(
            output,
            br'<testcase classname="tests\.testsuite\.'
            br'(XMLTestRunnerTestCase\.)?DummySubTest" '
            br'name="test_subTest_fail \(i=1\)"')
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_error(self):
# test for issue #155
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_error'))
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_error \(i=0\)"')
self.assertRegexpMatches(
output,
br'<testcase classname="tests\.testsuite\.'
br'(XMLTestRunnerTestCase\.)?DummySubTest" '
br'name="test_subTest_error \(i=1\)"')
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_mixed(self):
# test for issue #155
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_mixed'))
runner.run(suite)
outdir.seek(0)
output = outdir.read()
output = _strip_xml(output, {
'//testsuite': (),
'//testcase': ('classname', 'name'),
'//failure': ('message',),
})
self.assertNotIn(
'name="test_subTest_mixed (i=0)"'.encode('utf8'),
output)
self.assertIn(
'name="test_subTest_mixed (i=1)"'.encode('utf8'),
output)
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_pass(self):
# Test for issue #85
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_pass'))
self._test_xmlrunner(suite)
@unittest.skipIf(not hasattr(unittest.TestCase, 'subTest'),
'unittest.TestCase.subTest not present.')
def test_unittest_subTest_with_dots(self):
# Test for issue #85
suite = unittest.TestSuite()
suite.addTest(self.DummySubTest('test_subTest_with_dots'))
outdir = BytesIO()
self._test_xmlrunner(suite, outdir=outdir)
xmlcontent = outdir.getvalue().decode()
# Method name
self.assertNotIn('name="subTest', xmlcontent, 'parsing of test method name is not done correctly')
self.assertIn('name="test_subTest_with_dots (module=\'hello.world.subTest', xmlcontent)
matchString = 'classname="tests.testsuite.XMLTestRunnerTestCase.DummySubTest.test_subTest_with_dots (module=\'hello.world"'
self.assertNotIn(matchString, xmlcontent, 'parsing of class name is not done correctly')
self.assertIn('classname="tests.testsuite.XMLTestRunnerTestCase.DummySubTest"', xmlcontent)
def test_xmlrunner_pass(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_failfast(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir,
verbosity=self.verbosity, failfast=True,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertIn('test_fail'.encode('utf8'), output)
self.assertNotIn('test_pass'.encode('utf8'), output)
def test_xmlrunner_verbose(self):
self.verbosity = 1
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_showall(self):
self.verbosity = 2
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_cdata_section(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_cdata_section'))
self._test_xmlrunner(suite)
def test_xmlrunner_invalid_xml_chars_in_doc(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_invalid_xml_chars_in_doc'))
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
# Finally check if we have a valid XML document or not.
try:
minidom.parseString(output)
except Exception as e: # pragma: no cover
# note: we could remove the try/except, but it's more crude.
self.fail(e)
def test_xmlrunner_outsuffix(self):
self.runner_kwargs['outsuffix'] = '.somesuffix'
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
xmlfile = glob(os.path.join(self.outdir, '*xml'))[0]
assert xmlfile.endswith('.somesuffix.xml')
def test_xmlrunner_nosuffix(self):
self.runner_kwargs['outsuffix'] = ''
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
xmlfile = glob(os.path.join(self.outdir, '*xml'))[0]
xmlfile = os.path.basename(xmlfile)
assert xmlfile.endswith('DummyTest.xml')
def test_junitxml_properties(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.properties = dict(key='value')
self._test_xmlrunner(suite)
def test_junitxml_xsd_validation_order(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.addTest(self.DummyTest('test_output_stdout_and_stderr'))
suite.properties = dict(key='value')
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
i_properties = output.index('<properties>'.encode('utf8'))
i_system_out = output.index('<system-out>'.encode('utf8'))
i_system_err = output.index('<system-err>'.encode('utf8'))
i_testcase = output.index('<testcase'.encode('utf8'))
self.assertTrue(i_properties < i_testcase <
i_system_out < i_system_err)
# XSD validation - for good measure.
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
def test_junitxml_xsd_validation_empty_properties(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.properties = None
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
self.assertNotIn('<properties>'.encode('utf8'), output)
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
@unittest.skipIf(hasattr(sys, 'pypy_version_info'),
'skip - PyPy + lxml seems to be hanging')
def test_xunit_plugin_transform(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_fail'))
suite.addTest(self.DummyTest('test_pass'))
suite.properties = None
outdir = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=self.stream, output=outdir, verbosity=self.verbosity,
**self.runner_kwargs)
runner.run(suite)
outdir.seek(0)
output = outdir.read()
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', output)
with self.assertRaises(etree.DocumentInvalid):
validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', output)
from xmlrunner.extra.xunit_plugin import transform
transformed = transform(output)
validate_junit_report('14c6e39c38408b9ed6280361484a13c6f5becca7', transformed)
validate_junit_report('ae25da5089d4f94ac6c4669bf736e4d416cc4665', transformed)
self.assertIn('test_pass'.encode('utf8'), transformed)
self.assertIn('test_fail'.encode('utf8'), transformed)
def test_xmlrunner_elapsed_times(self):
self.runner_kwargs['elapsed_times'] = False
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self._test_xmlrunner(suite)
def test_xmlrunner_resultclass(self):
class Result(_XMLTestResult):
pass
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
self.runner_kwargs['resultclass'] = Result
self._test_xmlrunner(suite)
def test_xmlrunner_stream(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_stream_empty_testsuite(self):
stream = self.stream
output = BytesIO()
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
runner.run(suite)
def test_xmlrunner_output_subdir(self):
stream = self.stream
output = os.path.join(self.outdir, 'subdir')
runner = xmlrunner.XMLTestRunner(
stream=stream, output=output, verbosity=self.verbosity,
**self.runner_kwargs)
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
runner.run(suite)
def test_xmlrunner_patched_stdout(self):
old_stdout, old_stderr = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = StringIO(), StringIO()
suite = unittest.TestSuite()
suite.addTest(self.DummyTest('test_pass'))
suite.properties = dict(key='value')
self._test_xmlrunner(suite)
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
def test_opaque_decorator(self):
suite = unittest.TestSuite()
suite.addTest(self.DecoratedUnitTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertNotIn('IOError:', testsuite_output)
def test_xmlrunner_error_in_call(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyErrorInCallTest('test_pass'))
self._test_xmlrunner(suite)
testsuite_output = self.stream.getvalue()
self.assertIn('Exception: Massive fail', testsuite_output)
@unittest.skipIf(not hasattr(sys, 'getrefcount'),
'skip - PyPy does not have sys.getrefcount.')
@unittest.skipIf((3, 0) <= sys.version_info < (3, 4),
'skip - test not garbage collected. '
'https://bugs.python.org/issue11798.')
def test_xmlrunner_hold_traceback(self):
suite = unittest.TestSuite()
suite.addTest(self.DummyRefCountTest('test_fail'))
countBeforeTest = sys.getrefcount(self.DummyRefCountTest.dummy)
runner = self._test_xmlrunner(suite)
countAfterTest = sys.getrefcount(self.DummyRefCountTest.dummy)
self.assertEqual(countBeforeTest, countAfterTest)
class StderrXMLTestRunner(xmlrunner.XMLTestRunner):
def __init__(self, **kwargs):
super(XMLTestRunnerTestCase.StderrXMLTestRunner, self).__init__(
stream=sys.stderr,
**kwargs
)
def test_test_program_succeed_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertNotIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_succeed_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_pass',
],
exit=False,
)
self.assertIn('should not be printed', r[0].getvalue())
self.assertNotIn('should not be printed', r[1].getvalue())
def test_test_program_fail_with_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'-b',
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertNotIn('should be printed', r[0].getvalue())
self.assertIn('should be printed', r[1].getvalue())
def test_test_program_fail_wo_buffer(self):
with capture_stdout_stderr() as r:
unittest.TestProgram(
module=self.__class__.__module__,
testRunner=self.StderrXMLTestRunner,
argv=[
sys.argv[0],
'XMLTestRunnerTestCase.DummyTest.test_runner_buffer_output_fail',
],
exit=False,
)
self.assertIn('should be printed', r[0].getvalue())
self.assertNotIn('should be printed', r[1].getvalue())
def test_partialmethod(self):
from functools import partialmethod
def test_partialmethod(test):
pass
class TestWithPartialmethod(unittest.TestCase):
pass
setattr(
TestWithPartialmethod,
'test_partialmethod',
partialmethod(test_partialmethod),
)
suite = unittest.TestSuite()
suite.addTest(TestWithPartialmethod('test_partialmethod'))
self._test_xmlrunner(suite)
class DuplicateWriterTestCase(unittest.TestCase):
    """Verify _DuplicateWriter mirrors everything written to it into both
    of its underlying streams (a real file and an in-memory buffer)."""

    def setUp(self):
        # First sink: a real temp file; second sink: an in-memory StringIO.
        fd, self.file = mkstemp()
        self.fh = os.fdopen(fd, 'w')
        self.buffer = StringIO()
        self.writer = _DuplicateWriter(self.fh, self.buffer)

    def tearDown(self):
        self.buffer.close()
        self.fh.close()
        os.unlink(self.file)

    def getFirstContent(self):
        """Return what reached the temp file (the first sink)."""
        with open(self.file, 'r') as f:
            return f.read()

    def getSecondContent(self):
        """Return what reached the StringIO buffer (the second sink)."""
        return self.buffer.getvalue()

    def test_flush(self):
        # After flush, both sinks must hold identical content.
        self.writer.write('foobarbaz')
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())

    def test_writable(self):
        self.assertTrue(self.writer.writable())

    def test_writelines(self):
        self.writer.writelines([
            'foo\n',
            'bar\n',
            'baz\n',
        ])
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())

    def test_write(self):
        # try long buffer (1M)
        buffer = 'x' * (1024 * 1024)
        wrote = self.writer.write(buffer)
        self.writer.flush()
        self.assertEqual(self.getFirstContent(), self.getSecondContent())
        # write() must report the full duplicated length.
        self.assertEqual(wrote, len(self.getSecondContent()))
class XMLProgramTestCase(unittest.TestCase):
    """Exercise the XMLTestProgram command-line entry point, checking that
    CLI flags are forwarded to XMLTestRunner (runner and sys.exit mocked)."""

    @mock.patch('sys.argv', ['xmlrunner', '-o', 'flaf'])
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output(self, exiter, testrunner):
        """-o DIR must be passed through as output='flaf'."""
        xmlrunner.runner.XMLTestProgram()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output='flaf',
        )
        # tb_locals was added to TestProgram in Python 3.5.
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)

    @mock.patch('sys.argv', ['xmlrunner', '--output-file', 'test.xml'])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_output_file(self, exiter, testrunner, opener):
        """--output-file must open the file in 'wb', hand the handle to the
        runner as output=, and close it afterwards."""
        xmlrunner.runner.XMLTestProgram()
        opener.assert_called_once_with('test.xml', 'wb')
        open_file = opener()
        open_file.close.assert_called_with()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            output=open_file,
        )
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)

    @mock.patch('sys.argv', ['xmlrunner', '--outsuffix', ''])
    @mock.patch('xmlrunner.runner.open')
    @mock.patch('xmlrunner.runner.XMLTestRunner')
    @mock.patch('sys.exit')
    def test_xmlrunner_outsuffix(self, exiter, testrunner, opener):
        """--outsuffix (even empty) must be forwarded as outsuffix=."""
        xmlrunner.runner.XMLTestProgram()
        kwargs = dict(
            buffer=mock.ANY,
            failfast=mock.ANY,
            verbosity=mock.ANY,
            warnings=mock.ANY,
            outsuffix='',
        )
        if sys.version_info[:2] > (3, 4):
            kwargs.update(tb_locals=mock.ANY)
        testrunner.assert_called_once_with(**kwargs)
        exiter.assert_called_once_with(False)
class ResolveFilenameTestCase(unittest.TestCase):
    """Check resolve_filename(): relative paths inside the tree are kept
    relative, while paths outside (or unrelatable) fall back to the input."""

    @mock.patch('os.path.relpath')
    def test_resolve_filename_relative(self, relpath):
        """A path under the current tree resolves to the relative form."""
        relpath.return_value = 'somefile.py'
        filename = resolve_filename('/path/to/somefile.py')
        self.assertEqual(filename, 'somefile.py')

    @mock.patch('os.path.relpath')
    def test_resolve_filename_outside(self, relpath):
        """A relpath that escapes the tree ('..') keeps the absolute path."""
        relpath.return_value = '../../../tmp/somefile.py'
        filename = resolve_filename('/tmp/somefile.py')
        self.assertEqual(filename, '/tmp/somefile.py')

    @mock.patch('os.path.relpath')
    def test_resolve_filename_error(self, relpath):
        """On Windows, relpath raises across drives; input is returned as-is."""
        relpath.side_effect = ValueError("ValueError: path is on mount 'C:', start on mount 'D:'")
        filename = resolve_filename('C:\\path\\to\\somefile.py')
        self.assertEqual(filename, 'C:\\path\\to\\somefile.py')
| true
| true
|
1c47eee4810990be4161b62e4a305d1fbceda5f0
| 3,159
|
py
|
Python
|
src/dkn_kg_preprocess.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | 1
|
2021-12-08T12:02:56.000Z
|
2021-12-08T12:02:56.000Z
|
src/dkn_kg_preprocess.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | null | null | null |
src/dkn_kg_preprocess.py
|
andreeaiana/geneg_benchmarking
|
0b53989c79b8e3771c144c0332fd36587dfe0f4d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# DISCLAIMER
# This code file is forked and adapted from https://github.com/hwwang55/DKN/blob/master/data/kg/kg_preprocess.py
# import libraries
import os
import numpy as np
from pathlib import Path
from typing import Dict, List
# import custom code
from src.config import DKN_KGE_METHOD, DKN_KGE_ENTITY_EMBEDDING_DIM
from src.config import FILENAME_ENTITY2INDEX, FILENAME_ENTITY2ID, FILENAME_TRIPLE2ID
from src.config import DATA_DIR
from src.util.logger import setup_logging
def read_map(file: Path) -> Dict[str, int]:
    """Parse a two-column, tab-separated mapping file into a dictionary.

    Lines that do not split into exactly two tab-separated fields (e.g.
    the leading entity-count line of entity2id.txt) are skipped.

    Args:
        file: path of the tab-separated mapping file.

    Returns:
        Mapping from entity identifier (the string written in the file)
        to its integer index.  Note: the original annotation claimed
        Dict[int, int], but keys are never converted from str.
    """
    entity2index_map: Dict[str, int] = {}
    # Context manager guarantees the handle is closed even if parsing fails.
    with open(file, encoding='utf-8') as reader:
        for line in reader:
            array = line.split('\t')
            if len(array) != 2:  # to skip the first line in entity2id.txt
                continue
            entity_id = array[0]
            # int() tolerates the trailing newline left after split('\t').
            index = int(array[1])
            entity2index_map[entity_id] = index
    return entity2index_map
def get_neighbors_for_entity(file: Path) -> Dict[int, List[int]]:
    """Build an undirected adjacency map from a triple2id file.

    Each valid line holds "head<TAB>tail<TAB>relation" (ids as ints);
    head and tail are recorded as neighbours of each other.  Lines that
    do not have exactly three fields (e.g. the leading triple-count line)
    are skipped.

    Args:
        file: path of the tab-separated triple file.

    Returns:
        Mapping from entity index to the list of its neighbour indices,
        in file order; duplicates are kept, matching the original logic.
    """
    entity2neighbor_map: Dict[int, List[int]] = {}
    with open(file, encoding='utf-8') as reader:
        for line in reader:
            array = line.strip().split('\t')
            if len(array) != 3:  # to skip the first line in triple2id.txt
                continue
            head = int(array[0])
            tail = int(array[1])
            # setdefault replaces the duplicated if/else insert logic while
            # keeping the plain-dict return type.
            entity2neighbor_map.setdefault(head, []).append(tail)
            entity2neighbor_map.setdefault(tail, []).append(head)
    return entity2neighbor_map
if __name__ == '__main__':
    logger = setup_logging(name=__file__, log_level='info')
    # entity2index.txt (generated by news_preprocess.py) contains all entities appear in the dataset
    # entity2id.txt (generated by prepare_data_for_transx.py) contains all entities in the crawled knowledge graph
    entity2index = read_map(FILENAME_ENTITY2INDEX)
    full_entity2index = read_map(FILENAME_ENTITY2ID)
    entity2neighbor = get_neighbors_for_entity(FILENAME_TRIPLE2ID)
    # Pre-trained KGE vectors, one row per entity of the full graph.
    full_embeddings = np.loadtxt(os.path.join(DATA_DIR, DKN_KGE_METHOD + '_entity2vec_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM) + '.vec'))
    # Row 0 stays all-zero (padding); hence len(entity2index) + 1 rows.
    entity_embeddings = np.zeros([len(entity2index) + 1, DKN_KGE_ENTITY_EMBEDDING_DIM])
    context_embeddings = np.zeros([len(entity2index) + 1, DKN_KGE_ENTITY_EMBEDDING_DIM])
    logger.info('writing entity embeddings...')
    for entity, index in entity2index.items():
        # Only entities present in the crawled KG get a real embedding;
        # the rest keep the zero vector.
        if entity in full_entity2index:
            full_index = full_entity2index[entity]
            entity_embeddings[index] = full_embeddings[full_index]
            if full_index in entity2neighbor:
                context_full_indices = entity2neighbor[full_index]
                # Context embedding = mean of the neighbours' embeddings.
                context_embeddings[index] = np.average(full_embeddings[context_full_indices], axis=0)
    np.save(os.path.join(DATA_DIR, 'entity_embeddings_' + DKN_KGE_METHOD + '_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM)), entity_embeddings)
    np.save(os.path.join(DATA_DIR, 'context_embeddings_' + DKN_KGE_METHOD + '_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM)), context_embeddings)
| 40.5
| 137
| 0.703704
|
import os
import numpy as np
from pathlib import Path
from typing import Dict, List
from src.config import DKN_KGE_METHOD, DKN_KGE_ENTITY_EMBEDDING_DIM
from src.config import FILENAME_ENTITY2INDEX, FILENAME_ENTITY2ID, FILENAME_TRIPLE2ID
from src.config import DATA_DIR
from src.util.logger import setup_logging
def read_map(file: Path) -> Dict[int, int]:
    """Load a tab-separated entity/index file into a dictionary.

    Malformed lines (not exactly two fields, such as the leading count
    line) are ignored.
    """
    mapping = {}
    with open(file, encoding='utf-8') as handle:
        for raw_line in handle:
            parts = raw_line.split('\t')
            if len(parts) == 2:
                mapping[parts[0]] = int(parts[1])
    return mapping
def get_neighbors_for_entity(file: Path) -> Dict[int, List[int]]:
    """Collect, for every entity id, the ids it co-occurs with in triples.

    Each valid line is "head<TAB>tail<TAB>relation"; both directions are
    recorded.  Lines without exactly three fields are skipped.
    """
    neighbors = {}
    with open(file, encoding='utf-8') as handle:
        for raw_line in handle:
            fields = raw_line.strip().split('\t')
            if len(fields) != 3:
                continue
            left = int(fields[0])
            right = int(fields[1])
            # Record the edge in both directions.
            for src, dst in ((left, right), (right, left)):
                if src in neighbors:
                    neighbors[src].append(dst)
                else:
                    neighbors[src] = [dst]
    return neighbors
if __name__ == '__main__':
    logger = setup_logging(name=__file__, log_level='info')
    # Entities appearing in the news dataset vs. all entities of the KG.
    entity2index = read_map(FILENAME_ENTITY2INDEX)
    full_entity2index = read_map(FILENAME_ENTITY2ID)
    entity2neighbor = get_neighbors_for_entity(FILENAME_TRIPLE2ID)
    # Pre-trained KGE vectors, one row per entity of the full graph.
    full_embeddings = np.loadtxt(os.path.join(DATA_DIR, DKN_KGE_METHOD + '_entity2vec_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM) + '.vec'))
    # Row 0 stays all-zero (padding); hence len(entity2index) + 1 rows.
    entity_embeddings = np.zeros([len(entity2index) + 1, DKN_KGE_ENTITY_EMBEDDING_DIM])
    context_embeddings = np.zeros([len(entity2index) + 1, DKN_KGE_ENTITY_EMBEDDING_DIM])
    logger.info('writing entity embeddings...')
    for entity, index in entity2index.items():
        # Entities missing from the crawled KG keep the zero vector.
        if entity in full_entity2index:
            full_index = full_entity2index[entity]
            entity_embeddings[index] = full_embeddings[full_index]
            if full_index in entity2neighbor:
                context_full_indices = entity2neighbor[full_index]
                # Context embedding = mean of the neighbours' embeddings.
                context_embeddings[index] = np.average(full_embeddings[context_full_indices], axis=0)
    np.save(os.path.join(DATA_DIR, 'entity_embeddings_' + DKN_KGE_METHOD + '_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM)), entity_embeddings)
    np.save(os.path.join(DATA_DIR, 'context_embeddings_' + DKN_KGE_METHOD + '_' + str(DKN_KGE_ENTITY_EMBEDDING_DIM)), context_embeddings)
| true
| true
|
1c47f02bbbb62967084bcd65ed4058613766c005
| 179
|
py
|
Python
|
frappe/core/doctype/user_type/test_user_type.py
|
ssuda777/frappe
|
d3f3df2ce15154aecc1d9d6d07d947e72c2e8c6e
|
[
"MIT"
] | 1
|
2021-12-18T18:37:29.000Z
|
2021-12-18T18:37:29.000Z
|
frappe/core/doctype/user_type/test_user_type.py
|
JMBodz/frappe
|
eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d
|
[
"MIT"
] | 3
|
2021-02-27T11:50:14.000Z
|
2021-05-03T06:48:49.000Z
|
frappe/core/doctype/user_type/test_user_type.py
|
JMBodz/frappe
|
eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d
|
[
"MIT"
] | 2
|
2021-09-02T09:51:55.000Z
|
2021-09-07T04:55:42.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestUserType(unittest.TestCase):
    # Placeholder: no User Type behaviour is exercised yet.
    pass
| 19.888889
| 58
| 0.743017
|
import unittest
class TestUserType(unittest.TestCase):
    # Placeholder: no User Type behaviour is exercised yet.
    pass
| true
| true
|
1c47f12210994a95c7538f1240f7559389bdc3de
| 1,707
|
py
|
Python
|
watcher.py
|
yijianduanlang/n95-py
|
6efefe9497ed95a75c7712323a85daec7eb7f3aa
|
[
"MIT"
] | 357
|
2020-02-08T07:07:18.000Z
|
2022-03-26T02:44:53.000Z
|
watcher.py
|
tavernier/N95-watcher
|
6efefe9497ed95a75c7712323a85daec7eb7f3aa
|
[
"MIT"
] | 14
|
2020-02-10T08:12:58.000Z
|
2022-03-12T00:15:19.000Z
|
watcher.py
|
tavernier/N95-watcher
|
6efefe9497ed95a75c7712323a85daec7eb7f3aa
|
[
"MIT"
] | 107
|
2020-02-08T13:26:27.000Z
|
2022-03-05T04:38:26.000Z
|
import sys
import os, time, json
lib_path = os.path.join(os.path.dirname(__file__))[:-3]
sys.path.append(lib_path)
from selenium import webdriver
from log.logger import logger as log
from PIL import Image
browser = None
def check_shop(url, keywords):
    """Open *url* in the shared browser and record it when none of the
    sold-out *keywords* appear on the page.

    A hit (page loaded, no sold-out keyword, title not an error page) is
    appended to ../data.txt with a timestamp, and a screenshot is saved
    under imgs/.

    Args:
        url: product page to check.
        keywords: strings whose presence in the page source means
            "out of stock".
    """
    global browser
    browser.get(url)
    time.sleep(5)  # give the page time to render before inspecting it
    # Truthy when any sold-out keyword is present in the page source.
    find_flag = False
    for keyword in keywords:
        if keyword in browser.page_source:
            find_flag = keyword
            break
    # '出错啦' in the title marks shop error pages - ignore those too.
    if not find_flag and '出错啦' not in browser.title:
        log.warning("FIND!!!")
        log.warning(url)
        log.warning(keywords)
        str_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        # Append instead of read-modify-rewrite: same resulting file
        # content, exception-safe, and works even when data.txt does not
        # exist yet.
        with open("../data.txt", "a") as fo:
            fo.write(str_time + " " + browser.title + " url:" + url + "\n")
        print("发现口罩有货!!" + url)
        browser.save_screenshot("imgs/" + str_time + ".png")
        time.sleep(5)
def check_all_shops():
    """Check every shop URL listed in config/shop.json.

    Each entry holds a "shop" list of URLs and a comma-separated
    "key_word" string of sold-out markers passed on to check_shop().
    """
    with open(os.path.join(os.path.dirname(__file__), "config", "shop.json"), "r", encoding='UTF-8') as f:
        infos = json.loads(f.read())
    for info in infos:
        for shop in info["shop"]:
            # BUG FIX: the config key is "key_word" (see the split below);
            # the original logged info.get("keyword"), which is always None.
            log.info("checking {} / {}".format(shop, info.get("key_word")))
            keywords = info.get("key_word").split(",")
            check_shop(shop, keywords)
# Load the products from config/shop.json and poll their stock status
# forever; available items are recorded in data.txt.
if __name__ == "__main__":
    browser = webdriver.Chrome(os.path.join(os.path.dirname(__file__),"src", "chromedriver"))
    while True:
        check_all_shops()
    # browser.quit()  # unreachable: the loop above never exits
| 28.932203
| 104
| 0.589338
|
import sys
import os, time, json
lib_path = os.path.join(os.path.dirname(__file__))[:-3]
sys.path.append(lib_path)
from selenium import webdriver
from log.logger import logger as log
from PIL import Image
browser = None
def check_shop(url, keywords):
    """Open *url* in the shared browser; if no sold-out keyword appears and
    the title is not an error page, log the hit, append it to ../data.txt
    and save a screenshot under imgs/."""
    global browser
    browser.get(url)
    time.sleep(5)  # give the page time to render before inspecting it
    # Truthy when any sold-out keyword is present in the page source.
    find_flag = False
    for keyword in keywords:
        if keyword in browser.page_source:
            find_flag = keyword
            break
    # '出错啦' in the title marks shop error pages - ignore those too.
    if not find_flag and '出错啦' not in browser.title:
        log.warning("FIND!!!")
        log.warning(url)
        log.warning(keywords)
        # Read-modify-rewrite of data.txt: load existing lines, append the
        # new hit, write everything back.
        fo = open("../data.txt", "r")
        lines = fo.readlines()
        fo.close()
        fo = open("../data.txt", "w")
        str_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        lines.append(str_time+" "+browser.title+" url:"+url+"\n")
        fo.writelines(lines)
        fo.close()
        print("发现口罩有货!!"+url)
        browser.save_screenshot("imgs/" + str_time + ".png")
        time.sleep(5)
def check_all_shops():
    """Check every shop URL listed in config/shop.json.

    Each entry holds a "shop" list of URLs and a comma-separated
    "key_word" string of sold-out markers passed on to check_shop().
    """
    with open(os.path.join(os.path.dirname(__file__), "config", "shop.json"), "r", encoding='UTF-8') as f:
        infos = json.loads(f.read())
    for info in infos:
        for shop in info["shop"]:
            # BUG FIX: the config key is "key_word" (see the split below);
            # the original logged info.get("keyword"), which is always None.
            log.info("checking {} / {}".format(shop, info.get("key_word")))
            keywords = info.get("key_word").split(",")
            check_shop(shop, keywords)
# Entry point: start Chrome with the bundled chromedriver and poll all
# configured shops forever.
if __name__ == "__main__":
    browser = webdriver.Chrome(os.path.join(os.path.dirname(__file__),"src", "chromedriver"))
    while True:
        check_all_shops()
| true
| true
|
1c47f140c05031e8eb49b0a4ff76d0583d02aea8
| 326
|
py
|
Python
|
trial and error method.py
|
ankanpramanik/Solve-Equation-wby-Trial-and-Error
|
f0456354ad447988672bf97422ccf3245b094b6f
|
[
"MIT"
] | null | null | null |
trial and error method.py
|
ankanpramanik/Solve-Equation-wby-Trial-and-Error
|
f0456354ad447988672bf97422ccf3245b094b6f
|
[
"MIT"
] | null | null | null |
trial and error method.py
|
ankanpramanik/Solve-Equation-wby-Trial-and-Error
|
f0456354ad447988672bf97422ccf3245b094b6f
|
[
"MIT"
] | 1
|
2021-11-21T10:26:13.000Z
|
2021-11-21T10:26:13.000Z
|
# Solve N1*n1 + N2*n2 = N (or, failing that, N1*n1 - N2*n2 = N) for
# non-negative integers n1, n2 in [0, 100] by exhaustive search.
N1 = int(input("Enter the value of coefficient of n1 "))
N2 = int(input("Enter the value of coefficient of n2 "))
N = int(input("Enter the value of constant "))
for n1 in range(101):
    for n2 in range(101):
        total = N1 * n1 + N2 * n2
        difference = N1 * n1 - N2 * n2
        if total == N:
            print(n1, n2)
        elif difference == N:
            print(n1, n2)
| 29.636364
| 55
| 0.564417
|
# Solve N1*n1 + N2*n2 = N (or, failing that, N1*n1 - N2*n2 = N) for
# non-negative integers n1, n2 in [0, 100] by exhaustive search.
N1=int(input("Enter the value of coefficient of n1 "))
N2=int(input("Enter the value of coefficient of n2 "))
N=int(input("Enter the value of constant "))
for n1 in range(101):
    for n2 in range(101):
        if N1*n1+ N2*n2 == N:
            print(n1, n2)
        # Only checked when the sum equation did not match.
        elif N1*n1-N2*n2 ==N:
            print (n1,n2)
| true
| true
|
1c47f24060a389cff1da511978f70c8656a55176
| 1,646
|
py
|
Python
|
config/urls.py
|
lalfaro1704/backend_test
|
b94c8768d50829993f4035d522deb72deedce795
|
[
"MIT"
] | null | null | null |
config/urls.py
|
lalfaro1704/backend_test
|
b94c8768d50829993f4035d522deb72deedce795
|
[
"MIT"
] | null | null | null |
config/urls.py
|
lalfaro1704/backend_test
|
b94c8768d50829993f4035d522deb72deedce795
|
[
"MIT"
] | null | null | null |
"""Root URL configuration: home page, admin, user and menu apps, plus
debug-only error-page previews and django-debug-toolbar wiring."""
from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views

urlpatterns = [
    path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
    # Django Admin, use {% url 'admin:index' %}
    path(settings.ADMIN_URL, admin.site.urls),
    # User management
    path(
        "users/",
        include("backend_test.users.urls", namespace="users"),
    ),
    # Your stuff: custom urls includes go here
    path(
        "menu/",
        include(("backend_test.menu.urls", "travel"), namespace="menu"),
    ),
] + static(
    # Serve user-uploaded media files directly (development-style setup).
    settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
if settings.DEBUG:
    # This allows the error pages to be debugged during development, just visit
    # these url in browser to see how these error pages look like.
    urlpatterns += [
        path(
            "400/",
            default_views.bad_request,
            kwargs={"exception": Exception("Bad Request!")},
        ),
        path(
            "403/",
            default_views.permission_denied,
            kwargs={"exception": Exception("Permission Denied")},
        ),
        path(
            "404/",
            default_views.page_not_found,
            kwargs={"exception": Exception("Page not Found")},
        ),
        path("500/", default_views.server_error),
    ]
    if "debug_toolbar" in settings.INSTALLED_APPS:
        import debug_toolbar

        # Toolbar URLs must come first so they are matched before catch-alls.
        urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| 32.27451
| 85
| 0.63062
|
from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
path(settings.ADMIN_URL, admin.site.urls),
path(
"users/",
include("backend_test.users.urls", namespace="users"),
),
path(
"menu/",
include(("backend_test.menu.urls", "travel"), namespace="menu"),
),
] + static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
if settings.DEBUG:
urlpatterns += [
path(
"400/",
default_views.bad_request,
kwargs={"exception": Exception("Bad Request!")},
),
path(
"403/",
default_views.permission_denied,
kwargs={"exception": Exception("Permission Denied")},
),
path(
"404/",
default_views.page_not_found,
kwargs={"exception": Exception("Page not Found")},
),
path("500/", default_views.server_error),
]
if "debug_toolbar" in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
| true
| true
|
1c47f26627f3856f7841720cb5e66db2363667e0
| 16,289
|
py
|
Python
|
mongo-python-driver/setup.py
|
dlminvestments/IBM-Python-VM-1
|
27d06d19d96c170346c03ed4dc66587cbe0bc28f
|
[
"Apache-2.0"
] | null | null | null |
mongo-python-driver/setup.py
|
dlminvestments/IBM-Python-VM-1
|
27d06d19d96c170346c03ed4dc66587cbe0bc28f
|
[
"Apache-2.0"
] | 474
|
2020-12-23T22:22:55.000Z
|
2022-03-30T18:10:11.000Z
|
mongo-python-driver/setup.py
|
dlminvestments/IBM-Python-VM-1
|
27d06d19d96c170346c03ed4dc66587cbe0bc28f
|
[
"Apache-2.0"
] | 1
|
2020-12-23T23:56:24.000Z
|
2020-12-23T23:56:24.000Z
|
import os
import platform
import re
import sys
import warnings
if sys.version_info[:2] < (2, 7):
raise RuntimeError("Python version >= 2.7 required.")
# Hack to silence atexit traceback in some Python versions
try:
import multiprocessing
except ImportError:
pass
# Don't force people to install setuptools unless
# we have to.
try:
from setuptools import setup, __version__ as _setuptools_version
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, __version__ as _setuptools_version
from distutils.cmd import Command
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsOptionError
from distutils.errors import DistutilsPlatformError, DistutilsExecError
from distutils.core import Extension
_HAVE_SPHINX = True
try:
from sphinx.cmd import build as sphinx
except ImportError:
try:
import sphinx
except ImportError:
_HAVE_SPHINX = False
version = "4.0.dev0"
f = open("README.rst")
try:
try:
readme_content = f.read()
except:
readme_content = ""
finally:
f.close()
# PYTHON-654 - Clang doesn't support -mno-fused-madd but the pythons Apple
# ships are built with it. This is a problem starting with Xcode 5.1
# since clang 3.4 errors out when it encounters unrecognized compiler
# flags. This hack removes -mno-fused-madd from the CFLAGS automatically
# generated by distutils for Apple provided pythons, allowing C extension
# builds to complete without error. The inspiration comes from older
# versions of distutils.sysconfig.get_config_vars.
if sys.platform == 'darwin' and 'clang' in platform.python_compiler().lower():
from distutils.sysconfig import get_config_vars
res = get_config_vars()
for key in ('CFLAGS', 'PY_CFLAGS'):
if key in res:
flags = res[key]
flags = re.sub('-mno-fused-madd', '', flags)
res[key] = flags
class test(Command):
description = "run the tests"
user_options = [
("test-module=", "m", "Discover tests in specified module"),
("test-suite=", "s",
"Test suite to run (e.g. 'some_module.test_suite')"),
("failfast", "f", "Stop running tests on first failure or error"),
("xunit-output=", "x",
"Generate a results directory with XUnit XML format")
]
def initialize_options(self):
self.test_module = None
self.test_suite = None
self.failfast = False
self.xunit_output = None
def finalize_options(self):
if self.test_suite is None and self.test_module is None:
self.test_module = 'test'
elif self.test_module is not None and self.test_suite is not None:
raise DistutilsOptionError(
"You may specify a module or suite, but not both"
)
def run(self):
# Installing required packages, running egg_info and build_ext are
# part of normal operation for setuptools.command.test.test
if self.distribution.install_requires:
self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
if self.xunit_output:
self.distribution.fetch_build_eggs(["unittest-xml-reporting"])
self.run_command('egg_info')
build_ext_cmd = self.reinitialize_command('build_ext')
build_ext_cmd.inplace = 1
self.run_command('build_ext')
# Construct a TextTestRunner directly from the unittest imported from
# test, which creates a TestResult that supports the 'addSkip' method.
# setuptools will by default create a TextTestRunner that uses the old
# TestResult class.
from test import unittest, PymongoTestRunner, test_cases
if self.test_suite is None:
all_tests = unittest.defaultTestLoader.discover(self.test_module)
suite = unittest.TestSuite()
suite.addTests(sorted(test_cases(all_tests),
key=lambda x: x.__module__))
else:
suite = unittest.defaultTestLoader.loadTestsFromName(
self.test_suite)
if self.xunit_output:
from test import PymongoXMLTestRunner
runner = PymongoXMLTestRunner(verbosity=2, failfast=self.failfast,
output=self.xunit_output)
else:
runner = PymongoTestRunner(verbosity=2, failfast=self.failfast)
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
class doc(Command):
description = "generate or test documentation"
user_options = [("test", "t",
"run doctests instead of generating documentation")]
boolean_options = ["test"]
def initialize_options(self):
self.test = False
def finalize_options(self):
pass
def run(self):
if not _HAVE_SPHINX:
raise RuntimeError(
"You must install Sphinx to build or test the documentation.")
if sys.version_info[0] >= 3:
import doctest
from doctest import OutputChecker as _OutputChecker
# Match u or U (possibly followed by r or R), removing it.
# r/R can follow u/U but not precede it. Don't match the
# single character string 'u' or 'U'.
_u_literal_re = re.compile(
r"(\W|^)(?<![\'\"])[uU]([rR]?[\'\"])", re.UNICODE)
# Match b or B (possibly followed by r or R), removing.
# r/R can follow b/B but not precede it. Don't match the
# single character string 'b' or 'B'.
_b_literal_re = re.compile(
r"(\W|^)(?<![\'\"])[bB]([rR]?[\'\"])", re.UNICODE)
class _StringPrefixFixer(_OutputChecker):
def check_output(self, want, got, optionflags):
# The docstrings are written with python 2.x in mind.
# To make the doctests pass in python 3 we have to
# strip the 'u' prefix from the expected results. The
# actual results won't have that prefix.
want = re.sub(_u_literal_re, r'\1\2', want)
# We also have to strip the 'b' prefix from the actual
# results since python 2.x expected results won't have
# that prefix.
got = re.sub(_b_literal_re, r'\1\2', got)
return super(
_StringPrefixFixer, self).check_output(
want, got, optionflags)
def output_difference(self, example, got, optionflags):
example.want = re.sub(_u_literal_re, r'\1\2', example.want)
got = re.sub(_b_literal_re, r'\1\2', got)
return super(
_StringPrefixFixer, self).output_difference(
example, got, optionflags)
doctest.OutputChecker = _StringPrefixFixer
if self.test:
path = os.path.join(
os.path.abspath('.'), "doc", "_build", "doctest")
mode = "doctest"
else:
path = os.path.join(
os.path.abspath('.'), "doc", "_build", version)
mode = "html"
try:
os.makedirs(path)
except:
pass
sphinx_args = ["-E", "-b", mode, "doc", path]
# sphinx.main calls sys.exit when sphinx.build_main exists.
# Call build_main directly so we can check status and print
# the full path to the built docs.
if hasattr(sphinx, 'build_main'):
status = sphinx.build_main(sphinx_args)
else:
status = sphinx.main(sphinx_args)
if status:
raise RuntimeError("documentation step '%s' failed" % (mode,))
sys.stdout.write("\nDocumentation step '%s' performed, results here:\n"
" %s/\n" % (mode, path))
if sys.platform == 'win32':
# distutils.msvc9compiler can raise an IOError when failing to
# find the compiler
build_errors = (CCompilerError, DistutilsExecError,
DistutilsPlatformError, IOError)
else:
build_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class custom_build_ext(build_ext):
"""Allow C extension building to fail.
The C extension speeds up BSON encoding, but is not essential.
"""
warning_message = """
********************************************************************
WARNING: %s could not
be compiled. No C extensions are essential for PyMongo to run,
although they do result in significant speed improvements.
%s
Please see the installation docs for solutions to build issues:
https://pymongo.readthedocs.io/en/stable/installation.html
Here are some hints for popular operating systems:
If you are seeing this message on Linux you probably need to
install GCC and/or the Python development package for your
version of Python.
Debian and Ubuntu users should issue the following command:
$ sudo apt-get install build-essential python-dev
Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux,
Oracle Linux, Fedora, etc.) should issue the following command:
$ sudo yum install gcc python-devel
If you are seeing this message on Microsoft Windows please install
PyMongo using pip. Modern versions of pip will install PyMongo
from binary wheels available on pypi. If you must install from
source read the documentation here:
https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows
If you are seeing this message on macOS / OSX please install PyMongo
using pip. Modern versions of pip will install PyMongo from binary
wheels available on pypi. If wheels are not available for your version
of macOS / OSX, or you must install from source read the documentation
here:
https://pymongo.readthedocs.io/en/stable/installation.html#osx
********************************************************************
"""
def run(self):
try:
build_ext.run(self)
except DistutilsPlatformError:
e = sys.exc_info()[1]
sys.stdout.write('%s\n' % str(e))
warnings.warn(self.warning_message % ("Extension modules",
"There was an issue with "
"your platform configuration"
" - see above."))
def build_extension(self, ext):
name = ext.name
try:
build_ext.build_extension(self, ext)
except build_errors:
e = sys.exc_info()[1]
sys.stdout.write('%s\n' % str(e))
warnings.warn(self.warning_message % ("The %s extension "
"module" % (name,),
"The output above "
"this warning shows how "
"the compilation "
"failed."))
ext_modules = [Extension('bson._cbson',
include_dirs=['bson'],
sources=['bson/_cbsonmodule.c',
'bson/time64.c',
'bson/buffer.c',
'bson/encoding_helpers.c']),
Extension('pymongo._cmessage',
include_dirs=['bson'],
sources=['pymongo/_cmessagemodule.c',
'bson/buffer.c'])]
# PyOpenSSL 17.0.0 introduced support for OCSP. 17.1.0 introduced
# a related feature we need. 17.2.0 fixes a bug
# in set_default_verify_paths we should really avoid.
# service_identity 18.1.0 introduced support for IP addr matching.
pyopenssl_reqs = ["pyopenssl>=17.2.0", "requests<3.0.0", "service_identity>=18.1.0"]
extras_require = {
'encryption': ['pymongocrypt<2.0.0'],
'ocsp': pyopenssl_reqs,
'snappy': ['python-snappy'],
'tls': [],
'zstd': ['zstandard'],
'aws': ['pymongo-auth-aws<2.0.0'],
}
# https://jira.mongodb.org/browse/PYTHON-2117
# Environment marker support didn't settle down until version 20.10
# https://setuptools.readthedocs.io/en/latest/history.html#v20-10-0
_use_env_markers = tuple(map(int, _setuptools_version.split('.')[:2])) > (20, 9)
# TLS and DNS extras
# We install PyOpenSSL and service_identity for Python < 2.7.9 to
# get support for SNI, which is required to connection to Altas
# free and shared tier.
if sys.version_info[0] == 2:
if _use_env_markers:
# For building wheels on Python versions >= 2.7.9
for req in pyopenssl_reqs:
extras_require['tls'].append(
"%s ; python_full_version < '2.7.9'" % (req,))
if sys.platform == 'win32':
extras_require['tls'].append(
"wincertstore>=0.2 ; python_full_version < '2.7.9'")
else:
extras_require['tls'].append(
"certifi ; python_full_version < '2.7.9'")
elif sys.version_info < (2, 7, 9):
# For installing from source or egg files on Python versions
# older than 2.7.9, or systems that have setuptools versions
# older than 20.10.
extras_require['tls'].extend(pyopenssl_reqs)
if sys.platform == 'win32':
extras_require['tls'].append("wincertstore>=0.2")
else:
extras_require['tls'].append("certifi")
extras_require.update({'srv': ["dnspython>=1.16.0,<1.17.0"]})
extras_require.update({'tls': ["ipaddress"]})
else:
extras_require.update({'srv': ["dnspython>=1.16.0,<2.0.0"]})
# GSSAPI extras
if sys.platform == 'win32':
extras_require['gssapi'] = ["winkerberos>=0.5.0"]
else:
extras_require['gssapi'] = ["pykerberos"]
extra_opts = {
"packages": ["bson", "pymongo", "gridfs"]
}
if "--no_ext" in sys.argv:
sys.argv.remove("--no_ext")
elif (sys.platform.startswith("java") or
sys.platform == "cli" or
"PyPy" in sys.version):
sys.stdout.write("""
*****************************************************\n
The optional C extensions are currently not supported\n
by this python implementation.\n
*****************************************************\n
""")
else:
extra_opts['ext_modules'] = ext_modules
setup(
name="pymongo",
version=version,
description="Python driver for MongoDB <http://www.mongodb.org>",
long_description=readme_content,
author="Mike Dirolf",
author_email="mongodb-user@googlegroups.com",
maintainer="Bernie Hackett",
maintainer_email="bernie@mongodb.com",
url="http://github.com/mongodb/mongo-python-driver",
keywords=["mongo", "mongodb", "pymongo", "gridfs", "bson"],
install_requires=[],
license="Apache License, Version 2.0",
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Database"],
cmdclass={"build_ext": custom_build_ext,
"doc": doc,
"test": test},
extras_require=extras_require,
**extra_opts
)
| 38.783333
| 92
| 0.599362
|
import os
import platform
import re
import sys
import warnings
if sys.version_info[:2] < (2, 7):
raise RuntimeError("Python version >= 2.7 required.")
try:
import multiprocessing
except ImportError:
pass
# we have to.
try:
from setuptools import setup, __version__ as _setuptools_version
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, __version__ as _setuptools_version
from distutils.cmd import Command
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsOptionError
from distutils.errors import DistutilsPlatformError, DistutilsExecError
from distutils.core import Extension
_HAVE_SPHINX = True
try:
from sphinx.cmd import build as sphinx
except ImportError:
try:
import sphinx
except ImportError:
_HAVE_SPHINX = False
version = "4.0.dev0"
f = open("README.rst")
try:
try:
readme_content = f.read()
except:
readme_content = ""
finally:
f.close()
# PYTHON-654 - Clang doesn't support -mno-fused-madd but the pythons Apple
if sys.platform == 'darwin' and 'clang' in platform.python_compiler().lower():
from distutils.sysconfig import get_config_vars
res = get_config_vars()
for key in ('CFLAGS', 'PY_CFLAGS'):
if key in res:
flags = res[key]
flags = re.sub('-mno-fused-madd', '', flags)
res[key] = flags
class test(Command):
description = "run the tests"
user_options = [
("test-module=", "m", "Discover tests in specified module"),
("test-suite=", "s",
"Test suite to run (e.g. 'some_module.test_suite')"),
("failfast", "f", "Stop running tests on first failure or error"),
("xunit-output=", "x",
"Generate a results directory with XUnit XML format")
]
def initialize_options(self):
self.test_module = None
self.test_suite = None
self.failfast = False
self.xunit_output = None
def finalize_options(self):
if self.test_suite is None and self.test_module is None:
self.test_module = 'test'
elif self.test_module is not None and self.test_suite is not None:
raise DistutilsOptionError(
"You may specify a module or suite, but not both"
)
def run(self):
if self.distribution.install_requires:
self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
if self.xunit_output:
self.distribution.fetch_build_eggs(["unittest-xml-reporting"])
self.run_command('egg_info')
build_ext_cmd = self.reinitialize_command('build_ext')
build_ext_cmd.inplace = 1
self.run_command('build_ext')
from test import unittest, PymongoTestRunner, test_cases
if self.test_suite is None:
all_tests = unittest.defaultTestLoader.discover(self.test_module)
suite = unittest.TestSuite()
suite.addTests(sorted(test_cases(all_tests),
key=lambda x: x.__module__))
else:
suite = unittest.defaultTestLoader.loadTestsFromName(
self.test_suite)
if self.xunit_output:
from test import PymongoXMLTestRunner
runner = PymongoXMLTestRunner(verbosity=2, failfast=self.failfast,
output=self.xunit_output)
else:
runner = PymongoTestRunner(verbosity=2, failfast=self.failfast)
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
class doc(Command):
description = "generate or test documentation"
user_options = [("test", "t",
"run doctests instead of generating documentation")]
boolean_options = ["test"]
def initialize_options(self):
self.test = False
def finalize_options(self):
pass
def run(self):
if not _HAVE_SPHINX:
raise RuntimeError(
"You must install Sphinx to build or test the documentation.")
if sys.version_info[0] >= 3:
import doctest
from doctest import OutputChecker as _OutputChecker
# single character string 'u' or 'U'.
_u_literal_re = re.compile(
r"(\W|^)(?<![\'\"])[uU]([rR]?[\'\"])", re.UNICODE)
# Match b or B (possibly followed by r or R), removing.
# r/R can follow b/B but not precede it. Don't match the
_b_literal_re = re.compile(
r"(\W|^)(?<![\'\"])[bB]([rR]?[\'\"])", re.UNICODE)
class _StringPrefixFixer(_OutputChecker):
def check_output(self, want, got, optionflags):
want = re.sub(_u_literal_re, r'\1\2', want)
# We also have to strip the 'b' prefix from the actual
# results since python 2.x expected results won't have
got = re.sub(_b_literal_re, r'\1\2', got)
return super(
_StringPrefixFixer, self).check_output(
want, got, optionflags)
def output_difference(self, example, got, optionflags):
example.want = re.sub(_u_literal_re, r'\1\2', example.want)
got = re.sub(_b_literal_re, r'\1\2', got)
return super(
_StringPrefixFixer, self).output_difference(
example, got, optionflags)
doctest.OutputChecker = _StringPrefixFixer
if self.test:
path = os.path.join(
os.path.abspath('.'), "doc", "_build", "doctest")
mode = "doctest"
else:
path = os.path.join(
os.path.abspath('.'), "doc", "_build", version)
mode = "html"
try:
os.makedirs(path)
except:
pass
sphinx_args = ["-E", "-b", mode, "doc", path]
if hasattr(sphinx, 'build_main'):
status = sphinx.build_main(sphinx_args)
else:
status = sphinx.main(sphinx_args)
if status:
raise RuntimeError("documentation step '%s' failed" % (mode,))
sys.stdout.write("\nDocumentation step '%s' performed, results here:\n"
" %s/\n" % (mode, path))
if sys.platform == 'win32':
build_errors = (CCompilerError, DistutilsExecError,
DistutilsPlatformError, IOError)
else:
build_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class custom_build_ext(build_ext):
warning_message = """
********************************************************************
WARNING: %s could not
be compiled. No C extensions are essential for PyMongo to run,
although they do result in significant speed improvements.
%s
Please see the installation docs for solutions to build issues:
https://pymongo.readthedocs.io/en/stable/installation.html
Here are some hints for popular operating systems:
If you are seeing this message on Linux you probably need to
install GCC and/or the Python development package for your
version of Python.
Debian and Ubuntu users should issue the following command:
$ sudo apt-get install build-essential python-dev
Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux,
Oracle Linux, Fedora, etc.) should issue the following command:
$ sudo yum install gcc python-devel
If you are seeing this message on Microsoft Windows please install
PyMongo using pip. Modern versions of pip will install PyMongo
from binary wheels available on pypi. If you must install from
source read the documentation here:
https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows
If you are seeing this message on macOS / OSX please install PyMongo
using pip. Modern versions of pip will install PyMongo from binary
wheels available on pypi. If wheels are not available for your version
of macOS / OSX, or you must install from source read the documentation
here:
https://pymongo.readthedocs.io/en/stable/installation.html#osx
********************************************************************
"""
def run(self):
try:
build_ext.run(self)
except DistutilsPlatformError:
e = sys.exc_info()[1]
sys.stdout.write('%s\n' % str(e))
warnings.warn(self.warning_message % ("Extension modules",
"There was an issue with "
"your platform configuration"
" - see above."))
def build_extension(self, ext):
name = ext.name
try:
build_ext.build_extension(self, ext)
except build_errors:
e = sys.exc_info()[1]
sys.stdout.write('%s\n' % str(e))
warnings.warn(self.warning_message % ("The %s extension "
"module" % (name,),
"The output above "
"this warning shows how "
"the compilation "
"failed."))
ext_modules = [Extension('bson._cbson',
include_dirs=['bson'],
sources=['bson/_cbsonmodule.c',
'bson/time64.c',
'bson/buffer.c',
'bson/encoding_helpers.c']),
Extension('pymongo._cmessage',
include_dirs=['bson'],
sources=['pymongo/_cmessagemodule.c',
'bson/buffer.c'])]
pyopenssl_reqs = ["pyopenssl>=17.2.0", "requests<3.0.0", "service_identity>=18.1.0"]
extras_require = {
'encryption': ['pymongocrypt<2.0.0'],
'ocsp': pyopenssl_reqs,
'snappy': ['python-snappy'],
'tls': [],
'zstd': ['zstandard'],
'aws': ['pymongo-auth-aws<2.0.0'],
}
# https://setuptools.readthedocs.io/en/latest/history.html#v20-10-0
_use_env_markers = tuple(map(int, _setuptools_version.split('.')[:2])) > (20, 9)
# TLS and DNS extras
# We install PyOpenSSL and service_identity for Python < 2.7.9 to
# get support for SNI, which is required to connection to Altas
# free and shared tier.
if sys.version_info[0] == 2:
if _use_env_markers:
# For building wheels on Python versions >= 2.7.9
for req in pyopenssl_reqs:
extras_require['tls'].append(
"%s ; python_full_version < '2.7.9'" % (req,))
if sys.platform == 'win32':
extras_require['tls'].append(
"wincertstore>=0.2 ; python_full_version < '2.7.9'")
else:
extras_require['tls'].append(
"certifi ; python_full_version < '2.7.9'")
elif sys.version_info < (2, 7, 9):
# For installing from source or egg files on Python versions
# older than 2.7.9, or systems that have setuptools versions
# older than 20.10.
extras_require['tls'].extend(pyopenssl_reqs)
if sys.platform == 'win32':
extras_require['tls'].append("wincertstore>=0.2")
else:
extras_require['tls'].append("certifi")
extras_require.update({'srv': ["dnspython>=1.16.0,<1.17.0"]})
extras_require.update({'tls': ["ipaddress"]})
else:
extras_require.update({'srv': ["dnspython>=1.16.0,<2.0.0"]})
# GSSAPI extras
if sys.platform == 'win32':
extras_require['gssapi'] = ["winkerberos>=0.5.0"]
else:
extras_require['gssapi'] = ["pykerberos"]
extra_opts = {
"packages": ["bson", "pymongo", "gridfs"]
}
if "--no_ext" in sys.argv:
sys.argv.remove("--no_ext")
elif (sys.platform.startswith("java") or
sys.platform == "cli" or
"PyPy" in sys.version):
sys.stdout.write("""
*****************************************************\n
The optional C extensions are currently not supported\n
by this python implementation.\n
*****************************************************\n
""")
else:
extra_opts['ext_modules'] = ext_modules
setup(
name="pymongo",
version=version,
description="Python driver for MongoDB <http://www.mongodb.org>",
long_description=readme_content,
author="Mike Dirolf",
author_email="mongodb-user@googlegroups.com",
maintainer="Bernie Hackett",
maintainer_email="bernie@mongodb.com",
url="http://github.com/mongodb/mongo-python-driver",
keywords=["mongo", "mongodb", "pymongo", "gridfs", "bson"],
install_requires=[],
license="Apache License, Version 2.0",
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Database"],
cmdclass={"build_ext": custom_build_ext,
"doc": doc,
"test": test},
extras_require=extras_require,
**extra_opts
)
| true
| true
|
1c47f334f6f5c914d25a539e1d094e9034d15110
| 7,149
|
py
|
Python
|
modeling/dynamics/bullet/bdbody.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 23
|
2021-04-02T09:02:04.000Z
|
2022-03-22T05:31:03.000Z
|
modeling/dynamics/bullet/bdbody.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 35
|
2021-04-12T09:41:05.000Z
|
2022-03-26T13:32:46.000Z
|
modeling/dynamics/bullet/bdbody.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 16
|
2021-03-30T11:55:45.000Z
|
2022-03-30T07:10:59.000Z
|
from panda3d.bullet import BulletRigidBodyNode
from panda3d.bullet import BulletTriangleMesh
from panda3d.bullet import BulletTriangleMeshShape
from panda3d.bullet import BulletConvexHullShape, BulletBoxShape
from panda3d.core import TransformState, Vec3, GeomVertexRewriter, CollisionBox, Point3
import copy
import modeling.geometric_model as gm
import basis.data_adapter as dh
import basis.robot_math as rm
import numpy as np
class BDBody(BulletRigidBodyNode):
def __init__(self,
initor,
cdtype="triangles",
mass=.3,
restitution=0,
allow_deactivation=False,
allow_ccd=True,
friction=.2,
dynamic=True,
name="rbd"):
"""
TODO: triangles do not seem to work (very slow) in the github version (20210418)
Use convex if possible
:param initor: could be itself (copy), or an instance of collision model
:param type: triangle or convex
:param mass:
:param restitution: bounce parameter
:param friction:
:param dynamic: only applicable to triangle type, if an object does not move with force, it is not dynamic
:param name:
author: weiwei
date: 20190626, 20201119
"""
super().__init__(name)
if isinstance(initor, gm.GeometricModel):
if initor._objtrm is None:
raise ValueError("Only applicable to models with a trimesh!")
self.com = initor.objtrm.center_mass * base.physics_scale
self.setMass(mass)
self.setRestitution(restitution)
self.setFriction(friction)
self.setLinearDamping(.3)
self.setAngularDamping(.3)
if allow_deactivation:
self.setDeactivationEnabled(True)
self.setLinearSleepThreshold(.01*base.physics_scale)
self.setAngularSleepThreshold(.01*base.physics_scale)
else:
self.setDeactivationEnabled(False)
if allow_ccd: # continuous collision detection
self.setCcdMotionThreshold(1e-7)
self.setCcdSweptSphereRadius(0.0005*base.physics_scale)
geom_np = initor.objpdnp.getChild(0).find("+GeomNode")
geom = copy.deepcopy(geom_np.node().getGeom(0))
vdata = geom.modifyVertexData()
vertices = copy.deepcopy(np.frombuffer(vdata.modifyArrayHandle(0).getData(), dtype=np.float32))
vertices.shape=(-1,6)
vertices[:, :3]=vertices[:, :3]*base.physics_scale-self.com
vdata.modifyArrayHandle(0).setData(vertices.astype(np.float32).tobytes())
geomtf = geom_np.getTransform()
geomtf = geomtf.setPos(geomtf.getPos()*base.physics_scale)
if cdtype == "triangles":
geombmesh = BulletTriangleMesh()
geombmesh.addGeom(geom)
bulletshape = BulletTriangleMeshShape(geombmesh, dynamic=dynamic)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
elif cdtype == "convex":
bulletshape = BulletConvexHullShape() # TODO: compute a convex hull?
bulletshape.addGeom(geom, geomtf)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
elif cdtype == 'box':
minx = min(vertices[:,0])
miny = min(vertices[:,1])
minz = min(vertices[:,2])
maxx = max(vertices[:,0])
maxy = max(vertices[:,1])
maxz = max(vertices[:,2])
pcd_box = CollisionBox(Point3(minx, miny, minz),Point3(maxx, maxy, maxz))
bulletshape = BulletBoxShape.makeFromSolid(pcd_box)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
else:
raise NotImplementedError
pd_homomat = geomtf.getMat()
pd_com_pos = pd_homomat.xformPoint(Vec3(self.com[0], self.com[1], self.com[2]))
np_homomat = dh.pdmat4_to_npmat4(pd_homomat)
np_com_pos = dh.pdv3_to_npv3(pd_com_pos)
np_homomat[:3, 3] = np_com_pos # update center to com
self.setTransform(TransformState.makeMat(dh.npmat4_to_pdmat4(np_homomat)))
elif isinstance(initor, BDBody):
self.com = initor.com.copy()
self.setMass(initor.getMass())
self.setRestitution(initor.restitution)
self.setFriction(initor.friction)
self.setLinearDamping(.3)
self.setAngularDamping(.3)
if allow_deactivation:
self.setDeactivationEnabled(True)
self.setLinearSleepThreshold(.01*base.physics_scale)
self.setAngularSleepThreshold(.01*base.physics_scale)
else:
self.setDeactivationEnabled(False)
if allow_ccd:
self.setCcdMotionThreshold(1e-7)
self.setCcdSweptSphereRadius(0.0005*base.physics_scale)
np_homomat = copy.deepcopy(initor.get_homomat())
np_homomat[:3,3] = np_homomat[:3,3]*base.physics_scale
self.setTransform(TransformState.makeMat(dh.npmat4_to_pdmat4(np_homomat)))
self.addShape(initor.getShape(0), initor.getShapeTransform(0))
def get_pos(self):
pdmat4 = self.getTransform().getMat()
pdv3 = pdmat4.xformPoint(Vec3(-self.com[0], -self.com[1], -self.com[2]))
pos = dh.pdv3_to_npv3(pdv3)/base.physics_scale
return pos
def set_pos(self, npvec3):
self.setPos(dh.pdv3_to_npv3(npvec3)*base.physics_scale)
def get_homomat(self):
"""
get the homomat considering the original local frame
the dynamic body moves in a local frame defined at com (line 46 of this file), instead of returning the
homomat of the dynamic body, this file returns the pose of original local frame
the returned homomat can be used by collision bodies for rendering.
:return:
author: weiwei
date: 2019?, 20201119
"""
pd_homomat = self.getTransform().getMat()
pd_com_pos = pd_homomat.xformPoint(Vec3(-self.com[0], -self.com[1], -self.com[2]))
np_homomat = dh.pdmat4_to_npmat4(pd_homomat)
np_com_pos = dh.pdv3_to_npv3(pd_com_pos)
np_homomat[:3, 3] = np_com_pos/base.physics_scale
return np_homomat
def set_homomat(self, homomat):
"""
set the pose of the dynamic body
:param homomat: the homomat of the original frame (the collision model)
:return:
author: weiwei
date: 2019?, 20201119
"""
tmp_homomat = copy.deepcopy(homomat)
tmp_homomat[:3, 3] = tmp_homomat[:3,3]*base.physics_scale
pos = rm.homomat_transform_points(tmp_homomat, self.com)
rotmat = tmp_homomat[:3, :3]
self.setTransform(TransformState.makeMat(dh.npv3mat3_to_pdmat4(pos, rotmat)))
def copy(self):
return BDBody(self)
| 44.962264
| 114
| 0.615051
|
from panda3d.bullet import BulletRigidBodyNode
from panda3d.bullet import BulletTriangleMesh
from panda3d.bullet import BulletTriangleMeshShape
from panda3d.bullet import BulletConvexHullShape, BulletBoxShape
from panda3d.core import TransformState, Vec3, GeomVertexRewriter, CollisionBox, Point3
import copy
import modeling.geometric_model as gm
import basis.data_adapter as dh
import basis.robot_math as rm
import numpy as np
class BDBody(BulletRigidBodyNode):
def __init__(self,
initor,
cdtype="triangles",
mass=.3,
restitution=0,
allow_deactivation=False,
allow_ccd=True,
friction=.2,
dynamic=True,
name="rbd"):
super().__init__(name)
if isinstance(initor, gm.GeometricModel):
if initor._objtrm is None:
raise ValueError("Only applicable to models with a trimesh!")
self.com = initor.objtrm.center_mass * base.physics_scale
self.setMass(mass)
self.setRestitution(restitution)
self.setFriction(friction)
self.setLinearDamping(.3)
self.setAngularDamping(.3)
if allow_deactivation:
self.setDeactivationEnabled(True)
self.setLinearSleepThreshold(.01*base.physics_scale)
self.setAngularSleepThreshold(.01*base.physics_scale)
else:
self.setDeactivationEnabled(False)
if allow_ccd:
self.setCcdMotionThreshold(1e-7)
self.setCcdSweptSphereRadius(0.0005*base.physics_scale)
geom_np = initor.objpdnp.getChild(0).find("+GeomNode")
geom = copy.deepcopy(geom_np.node().getGeom(0))
vdata = geom.modifyVertexData()
vertices = copy.deepcopy(np.frombuffer(vdata.modifyArrayHandle(0).getData(), dtype=np.float32))
vertices.shape=(-1,6)
vertices[:, :3]=vertices[:, :3]*base.physics_scale-self.com
vdata.modifyArrayHandle(0).setData(vertices.astype(np.float32).tobytes())
geomtf = geom_np.getTransform()
geomtf = geomtf.setPos(geomtf.getPos()*base.physics_scale)
if cdtype == "triangles":
geombmesh = BulletTriangleMesh()
geombmesh.addGeom(geom)
bulletshape = BulletTriangleMeshShape(geombmesh, dynamic=dynamic)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
elif cdtype == "convex":
bulletshape = BulletConvexHullShape()
bulletshape.addGeom(geom, geomtf)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
elif cdtype == 'box':
minx = min(vertices[:,0])
miny = min(vertices[:,1])
minz = min(vertices[:,2])
maxx = max(vertices[:,0])
maxy = max(vertices[:,1])
maxz = max(vertices[:,2])
pcd_box = CollisionBox(Point3(minx, miny, minz),Point3(maxx, maxy, maxz))
bulletshape = BulletBoxShape.makeFromSolid(pcd_box)
bulletshape.setMargin(1e-6)
self.addShape(bulletshape, geomtf)
else:
raise NotImplementedError
pd_homomat = geomtf.getMat()
pd_com_pos = pd_homomat.xformPoint(Vec3(self.com[0], self.com[1], self.com[2]))
np_homomat = dh.pdmat4_to_npmat4(pd_homomat)
np_com_pos = dh.pdv3_to_npv3(pd_com_pos)
np_homomat[:3, 3] = np_com_pos
self.setTransform(TransformState.makeMat(dh.npmat4_to_pdmat4(np_homomat)))
elif isinstance(initor, BDBody):
self.com = initor.com.copy()
self.setMass(initor.getMass())
self.setRestitution(initor.restitution)
self.setFriction(initor.friction)
self.setLinearDamping(.3)
self.setAngularDamping(.3)
if allow_deactivation:
self.setDeactivationEnabled(True)
self.setLinearSleepThreshold(.01*base.physics_scale)
self.setAngularSleepThreshold(.01*base.physics_scale)
else:
self.setDeactivationEnabled(False)
if allow_ccd:
self.setCcdMotionThreshold(1e-7)
self.setCcdSweptSphereRadius(0.0005*base.physics_scale)
np_homomat = copy.deepcopy(initor.get_homomat())
np_homomat[:3,3] = np_homomat[:3,3]*base.physics_scale
self.setTransform(TransformState.makeMat(dh.npmat4_to_pdmat4(np_homomat)))
self.addShape(initor.getShape(0), initor.getShapeTransform(0))
def get_pos(self):
pdmat4 = self.getTransform().getMat()
pdv3 = pdmat4.xformPoint(Vec3(-self.com[0], -self.com[1], -self.com[2]))
pos = dh.pdv3_to_npv3(pdv3)/base.physics_scale
return pos
def set_pos(self, npvec3):
self.setPos(dh.pdv3_to_npv3(npvec3)*base.physics_scale)
def get_homomat(self):
pd_homomat = self.getTransform().getMat()
pd_com_pos = pd_homomat.xformPoint(Vec3(-self.com[0], -self.com[1], -self.com[2]))
np_homomat = dh.pdmat4_to_npmat4(pd_homomat)
np_com_pos = dh.pdv3_to_npv3(pd_com_pos)
np_homomat[:3, 3] = np_com_pos/base.physics_scale
return np_homomat
def set_homomat(self, homomat):
tmp_homomat = copy.deepcopy(homomat)
tmp_homomat[:3, 3] = tmp_homomat[:3,3]*base.physics_scale
pos = rm.homomat_transform_points(tmp_homomat, self.com)
rotmat = tmp_homomat[:3, :3]
self.setTransform(TransformState.makeMat(dh.npv3mat3_to_pdmat4(pos, rotmat)))
def copy(self):
return BDBody(self)
| true
| true
|
1c47f43155595c95ee4e65fc813a2c0e931c4d26
| 6,564
|
py
|
Python
|
sympy/vector/tests/test_vector.py
|
FabianBall/sympy
|
9d849ddfc45427fe7f6733ce4d18fa397d0f43a9
|
[
"BSD-3-Clause"
] | 3
|
2015-01-17T23:15:04.000Z
|
2015-05-26T14:11:44.000Z
|
sympy/vector/tests/test_vector.py
|
FabianBall/sympy
|
9d849ddfc45427fe7f6733ce4d18fa397d0f43a9
|
[
"BSD-3-Clause"
] | 1
|
2017-08-26T01:07:46.000Z
|
2017-08-26T16:05:49.000Z
|
sympy/vector/tests/test_vector.py
|
FabianBall/sympy
|
9d849ddfc45427fe7f6733ce4d18fa397d0f43a9
|
[
"BSD-3-Clause"
] | null | null | null |
from sympy.core import S
from sympy.simplify import simplify, trigsimp
from sympy import pi, sqrt, symbols, ImmutableMatrix as Matrix, \
sin, cos, Function, Integral, Derivative, diff
from sympy.vector.vector import Vector, BaseVector, VectorAdd, \
VectorMul, VectorZero
from sympy.vector.coordsysrect import CoordSys3D
from sympy.vector.vector import Cross, Dot, dot, cross
C = CoordSys3D('C')
i, j, k = C.base_vectors()
a, b, c = symbols('a b c')
def test_cross():
v1 = C.x * i + C.z * C.z * j
v2 = C.x * i + C.y * j + C.z * k
assert Cross(v1, v2) == Cross(C.x*C.i + C.z**2*C.j, C.x*C.i + C.y*C.j + C.z*C.k)
assert Cross(v1, v2).doit() == C.z**3*C.i + (-C.x*C.z)*C.j + (C.x*C.y - C.x*C.z**2)*C.k
assert cross(v1, v2) == C.z**3*C.i + (-C.x*C.z)*C.j + (C.x*C.y - C.x*C.z**2)*C.k
assert Cross(v1, v2) == -Cross(v2, v1)
assert Cross(v1, v2) + Cross(v2, v1) == Vector.zero
def test_dot():
v1 = C.x * i + C.z * C.z * j
v2 = C.x * i + C.y * j + C.z * k
assert Dot(v1, v2) == Dot(C.x*C.i + C.z**2*C.j, C.x*C.i + C.y*C.j + C.z*C.k)
assert Dot(v1, v2).doit() == C.x**2 + C.y*C.z**2
assert Dot(v1, v2).doit() == C.x**2 + C.y*C.z**2
assert Dot(v1, v2) == Dot(v2, v1)
def test_vector_sympy():
"""
Test whether the Vector framework confirms to the hashing
and equality testing properties of SymPy.
"""
v1 = 3*j
assert v1 == j*3
assert v1.components == {j: 3}
v2 = 3*i + 4*j + 5*k
v3 = 2*i + 4*j + i + 4*k + k
assert v3 == v2
assert v3.__hash__() == v2.__hash__()
def test_vector():
assert isinstance(i, BaseVector)
assert i != j
assert j != k
assert k != i
assert i - i == Vector.zero
assert i + Vector.zero == i
assert i - Vector.zero == i
assert Vector.zero != 0
assert -Vector.zero == Vector.zero
v1 = a*i + b*j + c*k
v2 = a**2*i + b**2*j + c**2*k
v3 = v1 + v2
v4 = 2 * v1
v5 = a * i
assert isinstance(v1, VectorAdd)
assert v1 - v1 == Vector.zero
assert v1 + Vector.zero == v1
assert v1.dot(i) == a
assert v1.dot(j) == b
assert v1.dot(k) == c
assert i.dot(v2) == a**2
assert j.dot(v2) == b**2
assert k.dot(v2) == c**2
assert v3.dot(i) == a**2 + a
assert v3.dot(j) == b**2 + b
assert v3.dot(k) == c**2 + c
assert v1 + v2 == v2 + v1
assert v1 - v2 == -1 * (v2 - v1)
assert a * v1 == v1 * a
assert isinstance(v5, VectorMul)
assert v5.base_vector == i
assert v5.measure_number == a
assert isinstance(v4, Vector)
assert isinstance(v4, VectorAdd)
assert isinstance(v4, Vector)
assert isinstance(Vector.zero, VectorZero)
assert isinstance(Vector.zero, Vector)
assert isinstance(v1 * 0, VectorZero)
assert v1.to_matrix(C) == Matrix([[a], [b], [c]])
assert i.components == {i: 1}
assert v5.components == {i: a}
assert v1.components == {i: a, j: b, k: c}
assert VectorAdd(v1, Vector.zero) == v1
assert VectorMul(a, v1) == v1*a
assert VectorMul(1, i) == i
assert VectorAdd(v1, Vector.zero) == v1
assert VectorMul(0, Vector.zero) == Vector.zero
def test_vector_magnitude_normalize():
assert Vector.zero.magnitude() == 0
assert Vector.zero.normalize() == Vector.zero
assert i.magnitude() == 1
assert j.magnitude() == 1
assert k.magnitude() == 1
assert i.normalize() == i
assert j.normalize() == j
assert k.normalize() == k
v1 = a * i
assert v1.normalize() == (a/sqrt(a**2))*i
assert v1.magnitude() == sqrt(a**2)
v2 = a*i + b*j + c*k
assert v2.magnitude() == sqrt(a**2 + b**2 + c**2)
assert v2.normalize() == v2 / v2.magnitude()
v3 = i + j
assert v3.normalize() == (sqrt(2)/2)*C.i + (sqrt(2)/2)*C.j
def test_vector_simplify():
A, s, k, m = symbols('A, s, k, m')
test1 = (1 / a + 1 / b) * i
assert (test1 & i) != (a + b) / (a * b)
test1 = simplify(test1)
assert (test1 & i) == (a + b) / (a * b)
assert test1.simplify() == simplify(test1)
test2 = (A**2 * s**4 / (4 * pi * k * m**3)) * i
test2 = simplify(test2)
assert (test2 & i) == (A**2 * s**4 / (4 * pi * k * m**3))
test3 = ((4 + 4 * a - 2 * (2 + 2 * a)) / (2 + 2 * a)) * i
test3 = simplify(test3)
assert (test3 & i) == 0
test4 = ((-4 * a * b**2 - 2 * b**3 - 2 * a**2 * b) / (a + b)**2) * i
test4 = simplify(test4)
assert (test4 & i) == -2 * b
v = (sin(a)+cos(a))**2*i - j
assert trigsimp(v) == (2*sin(a + pi/4)**2)*i + (-1)*j
assert trigsimp(v) == v.trigsimp()
assert simplify(Vector.zero) == Vector.zero
def test_vector_dot():
assert i.dot(Vector.zero) == 0
assert Vector.zero.dot(i) == 0
assert i & Vector.zero == 0
assert i.dot(i) == 1
assert i.dot(j) == 0
assert i.dot(k) == 0
assert i & i == 1
assert i & j == 0
assert i & k == 0
assert j.dot(i) == 0
assert j.dot(j) == 1
assert j.dot(k) == 0
assert j & i == 0
assert j & j == 1
assert j & k == 0
assert k.dot(i) == 0
assert k.dot(j) == 0
assert k.dot(k) == 1
assert k & i == 0
assert k & j == 0
assert k & k == 1
def test_vector_cross():
assert i.cross(Vector.zero) == Vector.zero
assert Vector.zero.cross(i) == Vector.zero
assert i.cross(i) == Vector.zero
assert i.cross(j) == k
assert i.cross(k) == -j
assert i ^ i == Vector.zero
assert i ^ j == k
assert i ^ k == -j
assert j.cross(i) == -k
assert j.cross(j) == Vector.zero
assert j.cross(k) == i
assert j ^ i == -k
assert j ^ j == Vector.zero
assert j ^ k == i
assert k.cross(i) == j
assert k.cross(j) == -i
assert k.cross(k) == Vector.zero
assert k ^ i == j
assert k ^ j == -i
assert k ^ k == Vector.zero
def test_projection():
v1 = i + j + k
v2 = 3*i + 4*j
v3 = 0*i + 0*j
assert v1.projection(v1) == i + j + k
assert v1.projection(v2) == S(7)/3*C.i + S(7)/3*C.j + S(7)/3*C.k
assert v1.projection(v1, scalar=True) == 1
assert v1.projection(v2, scalar=True) == S(7)/3
assert v3.projection(v1) == Vector.zero
def test_vector_diff_integrate():
f = Function('f')
v = f(a)*C.i + a**2*C.j - C.k
assert Derivative(v, a) == Derivative((f(a))*C.i +
a**2*C.j + (-1)*C.k, a)
assert (diff(v, a) == v.diff(a) == Derivative(v, a).doit() ==
(Derivative(f(a), a))*C.i + 2*a*C.j)
assert (Integral(v, a) == (Integral(f(a), a))*C.i +
(Integral(a**2, a))*C.j + (Integral(-1, a))*C.k)
| 28.53913
| 91
| 0.534735
|
from sympy.core import S
from sympy.simplify import simplify, trigsimp
from sympy import pi, sqrt, symbols, ImmutableMatrix as Matrix, \
sin, cos, Function, Integral, Derivative, diff
from sympy.vector.vector import Vector, BaseVector, VectorAdd, \
VectorMul, VectorZero
from sympy.vector.coordsysrect import CoordSys3D
from sympy.vector.vector import Cross, Dot, dot, cross
C = CoordSys3D('C')
i, j, k = C.base_vectors()
a, b, c = symbols('a b c')
def test_cross():
v1 = C.x * i + C.z * C.z * j
v2 = C.x * i + C.y * j + C.z * k
assert Cross(v1, v2) == Cross(C.x*C.i + C.z**2*C.j, C.x*C.i + C.y*C.j + C.z*C.k)
assert Cross(v1, v2).doit() == C.z**3*C.i + (-C.x*C.z)*C.j + (C.x*C.y - C.x*C.z**2)*C.k
assert cross(v1, v2) == C.z**3*C.i + (-C.x*C.z)*C.j + (C.x*C.y - C.x*C.z**2)*C.k
assert Cross(v1, v2) == -Cross(v2, v1)
assert Cross(v1, v2) + Cross(v2, v1) == Vector.zero
def test_dot():
v1 = C.x * i + C.z * C.z * j
v2 = C.x * i + C.y * j + C.z * k
assert Dot(v1, v2) == Dot(C.x*C.i + C.z**2*C.j, C.x*C.i + C.y*C.j + C.z*C.k)
assert Dot(v1, v2).doit() == C.x**2 + C.y*C.z**2
assert Dot(v1, v2).doit() == C.x**2 + C.y*C.z**2
assert Dot(v1, v2) == Dot(v2, v1)
def test_vector_sympy():
v1 = 3*j
assert v1 == j*3
assert v1.components == {j: 3}
v2 = 3*i + 4*j + 5*k
v3 = 2*i + 4*j + i + 4*k + k
assert v3 == v2
assert v3.__hash__() == v2.__hash__()
def test_vector():
assert isinstance(i, BaseVector)
assert i != j
assert j != k
assert k != i
assert i - i == Vector.zero
assert i + Vector.zero == i
assert i - Vector.zero == i
assert Vector.zero != 0
assert -Vector.zero == Vector.zero
v1 = a*i + b*j + c*k
v2 = a**2*i + b**2*j + c**2*k
v3 = v1 + v2
v4 = 2 * v1
v5 = a * i
assert isinstance(v1, VectorAdd)
assert v1 - v1 == Vector.zero
assert v1 + Vector.zero == v1
assert v1.dot(i) == a
assert v1.dot(j) == b
assert v1.dot(k) == c
assert i.dot(v2) == a**2
assert j.dot(v2) == b**2
assert k.dot(v2) == c**2
assert v3.dot(i) == a**2 + a
assert v3.dot(j) == b**2 + b
assert v3.dot(k) == c**2 + c
assert v1 + v2 == v2 + v1
assert v1 - v2 == -1 * (v2 - v1)
assert a * v1 == v1 * a
assert isinstance(v5, VectorMul)
assert v5.base_vector == i
assert v5.measure_number == a
assert isinstance(v4, Vector)
assert isinstance(v4, VectorAdd)
assert isinstance(v4, Vector)
assert isinstance(Vector.zero, VectorZero)
assert isinstance(Vector.zero, Vector)
assert isinstance(v1 * 0, VectorZero)
assert v1.to_matrix(C) == Matrix([[a], [b], [c]])
assert i.components == {i: 1}
assert v5.components == {i: a}
assert v1.components == {i: a, j: b, k: c}
assert VectorAdd(v1, Vector.zero) == v1
assert VectorMul(a, v1) == v1*a
assert VectorMul(1, i) == i
assert VectorAdd(v1, Vector.zero) == v1
assert VectorMul(0, Vector.zero) == Vector.zero
def test_vector_magnitude_normalize():
assert Vector.zero.magnitude() == 0
assert Vector.zero.normalize() == Vector.zero
assert i.magnitude() == 1
assert j.magnitude() == 1
assert k.magnitude() == 1
assert i.normalize() == i
assert j.normalize() == j
assert k.normalize() == k
v1 = a * i
assert v1.normalize() == (a/sqrt(a**2))*i
assert v1.magnitude() == sqrt(a**2)
v2 = a*i + b*j + c*k
assert v2.magnitude() == sqrt(a**2 + b**2 + c**2)
assert v2.normalize() == v2 / v2.magnitude()
v3 = i + j
assert v3.normalize() == (sqrt(2)/2)*C.i + (sqrt(2)/2)*C.j
def test_vector_simplify():
A, s, k, m = symbols('A, s, k, m')
test1 = (1 / a + 1 / b) * i
assert (test1 & i) != (a + b) / (a * b)
test1 = simplify(test1)
assert (test1 & i) == (a + b) / (a * b)
assert test1.simplify() == simplify(test1)
test2 = (A**2 * s**4 / (4 * pi * k * m**3)) * i
test2 = simplify(test2)
assert (test2 & i) == (A**2 * s**4 / (4 * pi * k * m**3))
test3 = ((4 + 4 * a - 2 * (2 + 2 * a)) / (2 + 2 * a)) * i
test3 = simplify(test3)
assert (test3 & i) == 0
test4 = ((-4 * a * b**2 - 2 * b**3 - 2 * a**2 * b) / (a + b)**2) * i
test4 = simplify(test4)
assert (test4 & i) == -2 * b
v = (sin(a)+cos(a))**2*i - j
assert trigsimp(v) == (2*sin(a + pi/4)**2)*i + (-1)*j
assert trigsimp(v) == v.trigsimp()
assert simplify(Vector.zero) == Vector.zero
def test_vector_dot():
assert i.dot(Vector.zero) == 0
assert Vector.zero.dot(i) == 0
assert i & Vector.zero == 0
assert i.dot(i) == 1
assert i.dot(j) == 0
assert i.dot(k) == 0
assert i & i == 1
assert i & j == 0
assert i & k == 0
assert j.dot(i) == 0
assert j.dot(j) == 1
assert j.dot(k) == 0
assert j & i == 0
assert j & j == 1
assert j & k == 0
assert k.dot(i) == 0
assert k.dot(j) == 0
assert k.dot(k) == 1
assert k & i == 0
assert k & j == 0
assert k & k == 1
def test_vector_cross():
assert i.cross(Vector.zero) == Vector.zero
assert Vector.zero.cross(i) == Vector.zero
assert i.cross(i) == Vector.zero
assert i.cross(j) == k
assert i.cross(k) == -j
assert i ^ i == Vector.zero
assert i ^ j == k
assert i ^ k == -j
assert j.cross(i) == -k
assert j.cross(j) == Vector.zero
assert j.cross(k) == i
assert j ^ i == -k
assert j ^ j == Vector.zero
assert j ^ k == i
assert k.cross(i) == j
assert k.cross(j) == -i
assert k.cross(k) == Vector.zero
assert k ^ i == j
assert k ^ j == -i
assert k ^ k == Vector.zero
def test_projection():
v1 = i + j + k
v2 = 3*i + 4*j
v3 = 0*i + 0*j
assert v1.projection(v1) == i + j + k
assert v1.projection(v2) == S(7)/3*C.i + S(7)/3*C.j + S(7)/3*C.k
assert v1.projection(v1, scalar=True) == 1
assert v1.projection(v2, scalar=True) == S(7)/3
assert v3.projection(v1) == Vector.zero
def test_vector_diff_integrate():
f = Function('f')
v = f(a)*C.i + a**2*C.j - C.k
assert Derivative(v, a) == Derivative((f(a))*C.i +
a**2*C.j + (-1)*C.k, a)
assert (diff(v, a) == v.diff(a) == Derivative(v, a).doit() ==
(Derivative(f(a), a))*C.i + 2*a*C.j)
assert (Integral(v, a) == (Integral(f(a), a))*C.i +
(Integral(a**2, a))*C.j + (Integral(-1, a))*C.k)
| true
| true
|
1c47f49ae688340870b158350bc212b335d4eb6d
| 5,776
|
py
|
Python
|
app/views/navigation.py
|
tch1bo/viaduct
|
bfd37b0a8408b2dd66fb01138163b80ce97699ff
|
[
"MIT"
] | 11
|
2015-04-23T21:57:56.000Z
|
2019-04-28T12:48:58.000Z
|
app/views/navigation.py
|
tch1bo/viaduct
|
bfd37b0a8408b2dd66fb01138163b80ce97699ff
|
[
"MIT"
] | 1
|
2016-10-05T14:10:58.000Z
|
2016-10-05T14:12:23.000Z
|
app/views/navigation.py
|
tch1bo/viaduct
|
bfd37b0a8408b2dd66fb01138163b80ce97699ff
|
[
"MIT"
] | 3
|
2016-10-05T14:00:42.000Z
|
2019-01-16T14:33:43.000Z
|
import json
import re
from flask import Blueprint, render_template, abort, request, flash, \
redirect, url_for
from flask_babel import _
from flask_login import current_user
from app import db
from app.decorators import require_role
from app.forms import init_form
from app.forms.navigation import NavigationEntryForm
from app.models.navigation import NavigationEntry
from app.models.page import Page
from app.roles import Roles
from app.service import role_service, page_service
from app.utils.forms import flash_form_errors
from app.utils.navigation import NavigationAPI
from app.utils.resource import get_all_routes
blueprint = Blueprint('navigation', __name__, url_prefix='/navigation')
@blueprint.route('/')
@require_role(Roles.NAVIGATION_WRITE)
def view():
entries = NavigationAPI.get_root_entries()
can_write = role_service.user_has_role(current_user,
Roles.NAVIGATION_WRITE)
return render_template('navigation/view.htm', nav_entries=entries,
can_write=can_write)
@blueprint.route('/create/', methods=['GET', 'POST'])
@blueprint.route('/create/<int:parent_id>/', methods=['GET', 'POST'])
@blueprint.route('/edit/<int:entry_id>/', methods=['GET', 'POST'])
@require_role(Roles.NAVIGATION_WRITE)
def edit(entry_id=None, parent_id=None):
entry = NavigationEntry.query.get_or_404(entry_id) if entry_id else None
form = init_form(NavigationEntryForm, obj=entry)
form.page_id.choices = [(-1, '-- {} --'.format(_('Custom URL')))] + \
db.session.query(Page.id, Page.path).all()
parent = NavigationEntry.query.get(parent_id) if parent_id else None
if parent_id and not parent:
flash(_('Cannot find parent navigation entry.'), 'danger')
return redirect(url_for('navigation.view'))
if form.validate_on_submit():
url = None
if form.page_id.data == -1:
url = form.url.data
if not re.compile('^/').match(url):
url = '/' + url
page_id = None if form.page_id.data == -1 else form.page_id.data
if entry:
entry.nl_title = form.nl_title.data
entry.en_title = form.en_title.data
entry.url = url
entry.page_id = page_id
entry.external = form.external.data
entry.activity_list = form.activity_list.data
entry.order_children_alphabetically = \
form.order_children_alphabetically.data
else:
last_entry = NavigationEntry.query.filter_by(parent_id=None) \
.order_by(NavigationEntry.position.desc()).first()
# If there is no parent position the new entry at the end of the
# top level entry.
position = (last_entry.position + 1) if last_entry else 0
entry = NavigationEntry(parent, form.nl_title.data,
form.en_title.data, url, page_id,
form.external.data,
form.activity_list.data, position)
db.session.add(entry)
db.session.commit()
flash(_('The navigation entry has been saved.'), 'success')
if not page_id and not form.external.data:
# Check if the page exists, if not redirect to create it
path = form.url.data.lstrip('/')
page = page_service.get_page_by_path(path)
if url.rstrip('/') in get_all_routes():
return redirect(url_for('navigation.view'))
if not page and form.url.data != '/':
flash(_('The link refers to a page that does not exist, please'
'create the page!'), 'warning')
return redirect(url_for('page.edit_page', path=path))
return redirect(url_for('navigation.view'))
else:
flash_form_errors(form)
parents = NavigationEntry.query.filter_by(parent_id=None)
if entry:
parents = parents.filter(NavigationEntry.id != entry.id)
return render_template('navigation/edit.htm', entry=entry, form=form,
parents=parents.all())
@blueprint.route('/delete/<int:entry_id>/', methods=['POST'])
@blueprint.route('/delete/<int:entry_id>/<int:inc_page>', methods=['POST'])
@require_role(Roles.NAVIGATION_WRITE)
def delete(entry_id, inc_page=0):
if inc_page and not role_service.user_has_role(current_user,
Roles.PAGE_WRITE):
flash(_('You do not have rights to remove pages'))
return abort(403)
entry = db.session.query(NavigationEntry).filter_by(id=entry_id).first()
if not entry:
abort(404)
if not entry.parent:
if entry.children.count() > 0:
flash('Deze item heeft nog subitems.', 'danger')
return redirect(url_for('navigation.edit', entry_id=entry.id))
if inc_page:
if entry.external or entry.activity_list:
flash('Deze item verwijst niet naar een pagina op deze website.',
'danger')
else:
if (entry.url is None or page_service.delete_page_by_path(
entry.url.lstrip('/'))):
flash('De pagina is verwijderd.', 'success')
else:
flash('De te verwijderen pagina kon niet worden gevonden.',
'danger')
db.session.delete(entry)
db.session.commit()
flash('De navigatie-item is verwijderd.', 'success')
return redirect(url_for('navigation.view'))
@blueprint.route('/navigation/reorder', methods=['POST'])
@require_role(Roles.NAVIGATION_WRITE)
def reorder():
entries = json.loads(request.form['entries'])
NavigationAPI.order(entries, None)
return ""
| 38
| 79
| 0.630886
|
import json
import re
from flask import Blueprint, render_template, abort, request, flash, \
redirect, url_for
from flask_babel import _
from flask_login import current_user
from app import db
from app.decorators import require_role
from app.forms import init_form
from app.forms.navigation import NavigationEntryForm
from app.models.navigation import NavigationEntry
from app.models.page import Page
from app.roles import Roles
from app.service import role_service, page_service
from app.utils.forms import flash_form_errors
from app.utils.navigation import NavigationAPI
from app.utils.resource import get_all_routes
blueprint = Blueprint('navigation', __name__, url_prefix='/navigation')
@blueprint.route('/')
@require_role(Roles.NAVIGATION_WRITE)
def view():
entries = NavigationAPI.get_root_entries()
can_write = role_service.user_has_role(current_user,
Roles.NAVIGATION_WRITE)
return render_template('navigation/view.htm', nav_entries=entries,
can_write=can_write)
@blueprint.route('/create/', methods=['GET', 'POST'])
@blueprint.route('/create/<int:parent_id>/', methods=['GET', 'POST'])
@blueprint.route('/edit/<int:entry_id>/', methods=['GET', 'POST'])
@require_role(Roles.NAVIGATION_WRITE)
def edit(entry_id=None, parent_id=None):
entry = NavigationEntry.query.get_or_404(entry_id) if entry_id else None
form = init_form(NavigationEntryForm, obj=entry)
form.page_id.choices = [(-1, '-- {} --'.format(_('Custom URL')))] + \
db.session.query(Page.id, Page.path).all()
parent = NavigationEntry.query.get(parent_id) if parent_id else None
if parent_id and not parent:
flash(_('Cannot find parent navigation entry.'), 'danger')
return redirect(url_for('navigation.view'))
if form.validate_on_submit():
url = None
if form.page_id.data == -1:
url = form.url.data
if not re.compile('^/').match(url):
url = '/' + url
page_id = None if form.page_id.data == -1 else form.page_id.data
if entry:
entry.nl_title = form.nl_title.data
entry.en_title = form.en_title.data
entry.url = url
entry.page_id = page_id
entry.external = form.external.data
entry.activity_list = form.activity_list.data
entry.order_children_alphabetically = \
form.order_children_alphabetically.data
else:
last_entry = NavigationEntry.query.filter_by(parent_id=None) \
.order_by(NavigationEntry.position.desc()).first()
position = (last_entry.position + 1) if last_entry else 0
entry = NavigationEntry(parent, form.nl_title.data,
form.en_title.data, url, page_id,
form.external.data,
form.activity_list.data, position)
db.session.add(entry)
db.session.commit()
flash(_('The navigation entry has been saved.'), 'success')
if not page_id and not form.external.data:
path = form.url.data.lstrip('/')
page = page_service.get_page_by_path(path)
if url.rstrip('/') in get_all_routes():
return redirect(url_for('navigation.view'))
if not page and form.url.data != '/':
flash(_('The link refers to a page that does not exist, please'
'create the page!'), 'warning')
return redirect(url_for('page.edit_page', path=path))
return redirect(url_for('navigation.view'))
else:
flash_form_errors(form)
parents = NavigationEntry.query.filter_by(parent_id=None)
if entry:
parents = parents.filter(NavigationEntry.id != entry.id)
return render_template('navigation/edit.htm', entry=entry, form=form,
parents=parents.all())
@blueprint.route('/delete/<int:entry_id>/', methods=['POST'])
@blueprint.route('/delete/<int:entry_id>/<int:inc_page>', methods=['POST'])
@require_role(Roles.NAVIGATION_WRITE)
def delete(entry_id, inc_page=0):
if inc_page and not role_service.user_has_role(current_user,
Roles.PAGE_WRITE):
flash(_('You do not have rights to remove pages'))
return abort(403)
entry = db.session.query(NavigationEntry).filter_by(id=entry_id).first()
if not entry:
abort(404)
if not entry.parent:
if entry.children.count() > 0:
flash('Deze item heeft nog subitems.', 'danger')
return redirect(url_for('navigation.edit', entry_id=entry.id))
if inc_page:
if entry.external or entry.activity_list:
flash('Deze item verwijst niet naar een pagina op deze website.',
'danger')
else:
if (entry.url is None or page_service.delete_page_by_path(
entry.url.lstrip('/'))):
flash('De pagina is verwijderd.', 'success')
else:
flash('De te verwijderen pagina kon niet worden gevonden.',
'danger')
db.session.delete(entry)
db.session.commit()
flash('De navigatie-item is verwijderd.', 'success')
return redirect(url_for('navigation.view'))
@blueprint.route('/navigation/reorder', methods=['POST'])
@require_role(Roles.NAVIGATION_WRITE)
def reorder():
entries = json.loads(request.form['entries'])
NavigationAPI.order(entries, None)
return ""
| true
| true
|
1c47f7e4b0e586c5810d5893015727706ff08291
| 1,641
|
py
|
Python
|
aionewton/wrapper.py
|
AndrielFR/aionewton
|
0f740851154de0e4f64d7c9f676b0b27eaabeccc
|
[
"MIT"
] | 1
|
2021-01-27T14:35:33.000Z
|
2021-01-27T14:35:33.000Z
|
aionewton/wrapper.py
|
AndrielFR/aionewton
|
0f740851154de0e4f64d7c9f676b0b27eaabeccc
|
[
"MIT"
] | null | null | null |
aionewton/wrapper.py
|
AndrielFR/aionewton
|
0f740851154de0e4f64d7c9f676b0b27eaabeccc
|
[
"MIT"
] | null | null | null |
"""An asnycio-based wrapper for `https://newton.now.sh`"""
import sys
from urllib.parse import quote
import aiohttp
ENDPOINTS = ["simplify", "factor", "derive", "integrate", "zeroes", "tangent",
"area", "cos", "sin", "tan", "arccos", "arcsin", "arctan", "abs",
"log"]
class Result:
def __init__(self, **kwargs):
self.operation = kwargs.get("operation", None)
self.expression = kwargs.get("expression", None)
self.result = kwargs.get("result", None)
self.raw = kwargs
def __str__(self):
return str(self.result)
__repr__ = __str__
async def _make_request(operation, expression):
"""Internal function to request a page by using a given string"""
encoded_expression = quote(expression, safe='')
url = f"https://newton.now.sh/api/v2/{operation}/{encoded_expression}"
async with aiohttp.ClientSession() as session:
async with session.get(url) as req:
assert isinstance(req, aiohttp.ClientResponse)
res = await req.json()
return Result(**res)
def wrap_coro(coro):
async def func():
return await coro
return func()
def expose_endpoints(module, *args):
"""
Expose modules globally
"""
# Credit goes to https://github.com/benpryke/PyNewtonMath
# for giving me the idea of wrapping them dynamically.
for op in args:
# Wrap function
def wrap(operator):
return lambda exp: wrap_coro(_make_request(operator, exp))
setattr(sys.modules[__name__], op, wrap(op))
setattr(module, op, getattr(sys.modules[__name__], op))
| 27.35
| 78
| 0.631322
|
import sys
from urllib.parse import quote
import aiohttp
ENDPOINTS = ["simplify", "factor", "derive", "integrate", "zeroes", "tangent",
"area", "cos", "sin", "tan", "arccos", "arcsin", "arctan", "abs",
"log"]
class Result:
def __init__(self, **kwargs):
self.operation = kwargs.get("operation", None)
self.expression = kwargs.get("expression", None)
self.result = kwargs.get("result", None)
self.raw = kwargs
def __str__(self):
return str(self.result)
__repr__ = __str__
async def _make_request(operation, expression):
encoded_expression = quote(expression, safe='')
url = f"https://newton.now.sh/api/v2/{operation}/{encoded_expression}"
async with aiohttp.ClientSession() as session:
async with session.get(url) as req:
assert isinstance(req, aiohttp.ClientResponse)
res = await req.json()
return Result(**res)
def wrap_coro(coro):
async def func():
return await coro
return func()
def expose_endpoints(module, *args):
for op in args:
def wrap(operator):
return lambda exp: wrap_coro(_make_request(operator, exp))
setattr(sys.modules[__name__], op, wrap(op))
setattr(module, op, getattr(sys.modules[__name__], op))
| true
| true
|
1c47f87cd0eec39b0b393cb0abd277a23b887c5d
| 1,866
|
py
|
Python
|
examples/basic_simple_color_triangle.py
|
2dx/moderngl
|
5f932560a535469626d79d22e4205f400e18f328
|
[
"MIT"
] | 916
|
2019-03-11T19:15:20.000Z
|
2022-03-31T19:22:16.000Z
|
examples/basic_simple_color_triangle.py
|
2dx/moderngl
|
5f932560a535469626d79d22e4205f400e18f328
|
[
"MIT"
] | 218
|
2019-03-11T06:05:52.000Z
|
2022-03-30T16:59:22.000Z
|
examples/basic_simple_color_triangle.py
|
2dx/moderngl
|
5f932560a535469626d79d22e4205f400e18f328
|
[
"MIT"
] | 110
|
2019-04-06T18:32:24.000Z
|
2022-03-21T20:30:47.000Z
|
'''
Renders a traingle that has all RGB combinations
'''
import numpy as np
from ported._example import Example
class SimpleColorTriangle(Example):
    """Draw a single triangle whose corners are pure red, green and blue.

    The GPU interpolates the per-vertex colors across the face, so the
    interior shows every RGB blend of the three corners.
    """

    gl_version = (3, 3)
    aspect_ratio = 16 / 9
    title = "Simple Color Triangle"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Shader pair: pass-through position, per-vertex color forwarded
        # to the fragment stage for interpolation.
        self.prog = self.ctx.program(
            vertex_shader='''
                #version 330
                in vec2 in_vert;
                in vec3 in_color;
                out vec3 v_color;    // Goes to the fragment shader
                void main() {
                    gl_Position = vec4(in_vert, 0.0, 1.0);
                    v_color = in_color;
                }
            ''',
            fragment_shader='''
                #version 330
                in vec3 v_color;
                out vec4 f_color;
                void main() {
                    // We're not interested in changing the alpha value
                    f_color = vec4(v_color, 1.0);
                }
            ''',
        )

        # Interleaved vertex data: each row is x, y followed by r, g, b.
        corner_data = [
            (0.0, 0.8, 1.0, 0.0, 0.0),   # top, red
            (-0.6, -0.8, 0.0, 1.0, 0.0),  # bottom-left, green
            (0.6, -0.8, 0.0, 0.0, 1.0),   # bottom-right, blue
        ]
        vertices = np.array(corner_data, dtype='f4').ravel()
        self.vbo = self.ctx.buffer(vertices)

        # '2f 3f' splits each vertex into 2 floats for in_vert and
        # 3 floats for in_color.
        self.vao = self.ctx.vertex_array(
            self.prog,
            [(self.vbo, '2f 3f', 'in_vert', 'in_color')],
        )

    def render(self, time: float, frame_time: float):
        # Clear to white, then draw the triangle.
        self.ctx.clear(1.0, 1.0, 1.0)
        self.vao.render()
if __name__ == '__main__':
    # Launch the example through the Example base class's standard runner.
    SimpleColorTriangle.run()
| 25.916667
| 71
| 0.478028
|
import numpy as np
from ported._example import Example
class SimpleColorTriangle(Example):
gl_version = (3, 3)
aspect_ratio = 16 / 9
title = "Simple Color Triangle"
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.prog = self.ctx.program(
vertex_shader='''
#version 330
in vec2 in_vert;
in vec3 in_color;
out vec3 v_color; // Goes to the fragment shader
void main() {
gl_Position = vec4(in_vert, 0.0, 1.0);
v_color = in_color;
}
''',
fragment_shader='''
#version 330
in vec3 v_color;
out vec4 f_color;
void main() {
// We're not interested in changing the alpha value
f_color = vec4(v_color, 1.0);
}
''',
)
# Point coordinates are put followed by the vec3 color values
vertices = np.array([
# x, y, red, green, blue
0.0, 0.8, 1.0, 0.0, 0.0,
-0.6, -0.8, 0.0, 1.0, 0.0,
0.6, -0.8, 0.0, 0.0, 1.0,
], dtype='f4')
self.vbo = self.ctx.buffer(vertices)
# We control the 'in_vert' and `in_color' variables
self.vao = self.ctx.vertex_array(
self.prog,
[
(self.vbo, '2f 3f', 'in_vert', 'in_color')
],
)
def render(self, time: float, frame_time: float):
self.ctx.clear(1.0, 1.0, 1.0)
self.vao.render()
if __name__ == '__main__':
SimpleColorTriangle.run()
| true
| true
|
1c47f8c6afe39da638d403941783b7f752b3f9d1
| 57,312
|
py
|
Python
|
nova/tests/unit/api/openstack/compute/test_hypervisors.py
|
mertakozcan/nova
|
6e4ab9714cc0ca147f61997aa7b68f88185ade5c
|
[
"Apache-2.0"
] | 1
|
2019-04-22T06:25:26.000Z
|
2019-04-22T06:25:26.000Z
|
nova/tests/unit/api/openstack/compute/test_hypervisors.py
|
woraser/nova
|
fc3890667e4971e3f0f35ac921c2a6c25f72adec
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/api/openstack/compute/test_hypervisors.py
|
woraser/nova
|
fc3890667e4971e3f0f35ac921c2a6c25f72adec
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
import netaddr
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel as uuids
import six
from webob import exc
from nova.api.openstack.compute import hypervisors \
as hypervisors_v21
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
CPU_INFO = """
{"arch": "x86_64",
"vendor": "fake",
"topology": {"cores": 1, "threads": 1, "sockets": 1},
"features": [],
"model": ""}"""
TEST_HYPERS = [
dict(id=1,
uuid=uuids.hyper1,
service_id=1,
host="compute1",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper1",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('1.1.1.1')),
dict(id=2,
uuid=uuids.hyper2,
service_id=2,
host="compute2",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper2",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('2.2.2.2'))]
TEST_SERVICES = [
objects.Service(id=1,
uuid=uuids.service1,
host="compute1",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
objects.Service(id=2,
uuid=uuids.service2,
host="compute2",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
]
TEST_HYPERS_OBJ = [objects.ComputeNode(**hyper_dct)
for hyper_dct in TEST_HYPERS]
TEST_HYPERS[0].update({'service': TEST_SERVICES[0]})
TEST_HYPERS[1].update({'service': TEST_SERVICES[1]})
TEST_SERVERS = [dict(name="inst1", uuid=uuids.instance_1, host="compute1"),
dict(name="inst2", uuid=uuids.instance_2, host="compute2"),
dict(name="inst3", uuid=uuids.instance_3, host="compute1"),
dict(name="inst4", uuid=uuids.instance_4, host="compute2")]
def fake_compute_node_get_all(context, limit=None, marker=None):
    """Stub ComputeNode listing with marker/limit paging semantics.

    The marker entry itself is excluded from the returned page; with no
    marker the page starts at the first fixture hypervisor.
    """
    if marker in ('99999', uuids.invalid_marker):
        raise exception.MarkerNotFound(marker)
    skipping = marker is not None
    page = []
    for hyper in TEST_HYPERS_OBJ:
        if skipping:
            # Starting with the 2.53 microversion, the marker is a uuid.
            if marker in (str(hyper.id), hyper.uuid):
                skipping = False
            continue
        if limit is None or len(page) < int(limit):
            page.append(hyper)
    return page
def fake_compute_node_search_by_hypervisor(context, hypervisor_re):
    # Stub: every search "matches" — the hypervisor_re pattern is
    # deliberately ignored and the full fixture list is returned.
    return TEST_HYPERS_OBJ
def fake_compute_node_get(context, compute_id):
    """Stub single-node lookup by uuid (2.53+) or integer primary key.

    Raises ComputeHostNotFound for an unknown id. Note the uuid match is
    tried first; a non-numeric, non-uuid id still hits ``int()`` and
    propagates ValueError, exactly as before.
    """
    matches = (hyper for hyper in TEST_HYPERS_OBJ
               if hyper.uuid == compute_id or hyper.id == int(compute_id))
    found = next(matches, None)
    if found is None:
        raise exception.ComputeHostNotFound(host=compute_id)
    return found
def fake_service_get_by_compute_host(context, host):
    # Stub: linear scan of the service fixtures by hostname.
    for service in TEST_SERVICES:
        if service.host == host:
            return service
    # NOTE(review): unknown hosts fall through to an implicit None —
    # confirm callers expect that rather than ComputeHostNotFound.
def fake_compute_node_statistics(context):
    """Stub: aggregate usage statistics across all fixture hypervisors.

    Returns a dict whose ``count`` is the number of hypervisors and whose
    remaining keys are simple sums of the matching ComputeNode attributes.
    """
    summed_attrs = (
        'vcpus', 'memory_mb', 'local_gb',
        'vcpus_used', 'memory_mb_used', 'local_gb_used',
        'free_ram_mb', 'free_disk_gb',
        'current_workload', 'running_vms', 'disk_available_least',
    )
    stats = dict(count=len(TEST_HYPERS_OBJ))
    for attr in summed_attrs:
        stats[attr] = sum(getattr(hyper, attr) for hyper in TEST_HYPERS_OBJ)
    return stats
def fake_instance_get_all_by_host(context, host):
    """Stub: build fake Instance objects for every test server on *host*."""
    return [fake_instance.fake_instance_obj(context, **server)
            for server in TEST_SERVERS
            if server['host'] == host]
class HypervisorsTestV21(test.NoDBTestCase):
api_version = '2.1'
# Allow subclasses to override if the id value in the response is the
# compute node primary key integer id or the uuid.
expect_uuid_for_id = False
# TODO(stephenfin): These should just be defined here
TEST_HYPERS_OBJ = copy.deepcopy(TEST_HYPERS_OBJ)
TEST_SERVICES = copy.deepcopy(TEST_SERVICES)
TEST_SERVERS = copy.deepcopy(TEST_SERVERS)
DETAIL_HYPERS_DICTS = copy.deepcopy(TEST_HYPERS)
del DETAIL_HYPERS_DICTS[0]['service_id']
del DETAIL_HYPERS_DICTS[1]['service_id']
del DETAIL_HYPERS_DICTS[0]['host']
del DETAIL_HYPERS_DICTS[1]['host']
del DETAIL_HYPERS_DICTS[0]['uuid']
del DETAIL_HYPERS_DICTS[1]['uuid']
DETAIL_HYPERS_DICTS[0].update({'state': 'up',
'status': 'enabled',
'service': dict(id=1, host='compute1',
disabled_reason=None)})
DETAIL_HYPERS_DICTS[1].update({'state': 'up',
'status': 'enabled',
'service': dict(id=2, host='compute2',
disabled_reason=None)})
INDEX_HYPER_DICTS = [
dict(id=1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
DETAIL_NULL_CPUINFO_DICT = {'': '', None: None}
def _get_request(self, use_admin_context, url=''):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.api_version)
def _set_up_controller(self):
self.controller = hypervisors_v21.HypervisorsController()
self.controller.servicegroup_api.service_is_up = mock.MagicMock(
return_value=True)
def _get_hyper_id(self):
    """Helper function to get the proper hypervisor id for a request

    :returns: The first hypervisor's uuid for microversions that expect a
        uuid for the id, otherwise the hypervisor's id primary key
    """
    first_hyper = self.TEST_HYPERS_OBJ[0]
    if self.expect_uuid_for_id:
        return first_hyper.uuid
    return first_hyper.id
def setUp(self):
super(HypervisorsTestV21, self).setUp()
self._set_up_controller()
self.rule_hyp_show = "os_compute_api:os-hypervisors"
host_api = self.controller.host_api
host_api.compute_node_get_all = mock.MagicMock(
side_effect=fake_compute_node_get_all)
host_api.service_get_by_compute_host = mock.MagicMock(
side_effect=fake_service_get_by_compute_host)
host_api.compute_node_search_by_hypervisor = mock.MagicMock(
side_effect=fake_compute_node_search_by_hypervisor)
host_api.compute_node_get = mock.MagicMock(
side_effect=fake_compute_node_get)
self.stub_out('nova.db.api.compute_node_statistics',
fake_compute_node_statistics)
def test_view_hypervisor_nodetail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], False, req)
self.assertEqual(self.INDEX_HYPER_DICTS[0], result)
def test_view_hypervisor_detail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], True, req)
self.assertEqual(self.DETAIL_HYPERS_DICTS[0], result)
def test_view_hypervisor_servers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(self.TEST_HYPERS_OBJ[0],
self.TEST_SERVICES[0],
False, req,
self.TEST_SERVERS)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'servers': [
dict(name="inst1", uuid=uuids.instance_1),
dict(name="inst2", uuid=uuids.instance_2),
dict(name="inst3", uuid=uuids.instance_3),
dict(name="inst4", uuid=uuids.instance_4)]})
self.assertEqual(expected_dict, result)
def _test_view_hypervisor_detail_cpuinfo_null(self, cpu_info):
req = self._get_request(True)
test_hypervisor_obj = copy.deepcopy(self.TEST_HYPERS_OBJ[0])
test_hypervisor_obj.cpu_info = cpu_info
result = self.controller._view_hypervisor(test_hypervisor_obj,
self.TEST_SERVICES[0],
True, req)
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict.update({'cpu_info':
self.DETAIL_NULL_CPUINFO_DICT[cpu_info]})
self.assertEqual(result, expected_dict)
def test_view_hypervisor_detail_cpuinfo_empty_string(self):
self._test_view_hypervisor_detail_cpuinfo_null('')
def test_view_hypervisor_detail_cpuinfo_none(self):
self._test_view_hypervisor_detail_cpuinfo_null(None)
def test_index(self):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_index_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_index_compute_host_not_found(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
_test(self)
def test_index_compute_host_not_mapped(self):
"""Tests that we don't fail index if a host is not mapped."""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
_test(self)
def test_detail(self):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(dict(hypervisors=self.DETAIL_HYPERS_DICTS), result)
def test_detail_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.detail, req)
def test_detail_compute_host_not_found(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# we don't care about all of the details, just make sure we get
# the subset we care about and there are more keys than what index
# would return
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
_test(self)
def test_detail_compute_host_not_mapped(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# we don't care about all of the details, just make sure we get
# the subset we care about and there are more keys than what index
# would return
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
_test(self)
def test_show_compute_host_not_mapped(self):
"""Tests that if a service is deleted but the compute node is not we
don't fail when listing hypervisors.
"""
@mock.patch.object(self.controller.host_api, 'compute_node_get',
return_value=self.TEST_HYPERS_OBJ[0])
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host')
def _test(self, mock_service, mock_compute_node_get):
req = self._get_request(True)
mock_service.side_effect = exception.HostMappingNotFound(
name='foo')
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound, self.controller.show,
req, hyper_id)
self.assertTrue(mock_service.called)
mock_compute_node_get.assert_called_once_with(mock.ANY, hyper_id)
_test(self)
def test_show_noid(self):
req = self._get_request(True)
hyperid = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, hyperid)
def test_show_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, 'abc')
def test_show_withid(self):
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
self.assertEqual(dict(hypervisor=self.DETAIL_HYPERS_DICTS[0]), result)
def test_show_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, req,
self._get_hyper_id())
def test_uptime_noid(self):
req = self._get_request(True)
hyper_id = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req,
hyper_id)
def test_uptime_notimplemented(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exc.HTTPNotImplemented()
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotImplemented,
self.controller.uptime, req, hyper_id)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_implemented(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
return_value="fake uptime"
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.uptime(req, hyper_id)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'uptime': "fake uptime"})
self.assertEqual(dict(hypervisor=expected_dict), result)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req, 'abc')
def test_uptime_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.uptime, req,
self.TEST_HYPERS_OBJ[0].id)
def test_uptime_hypervisor_down(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.ComputeServiceUnavailable(host='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPBadRequest,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_uptime_hypervisor_not_mapped_service_get(self):
@mock.patch.object(self.controller.host_api, 'compute_node_get')
@mock.patch.object(self.controller.host_api, 'get_host_uptime')
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
side_effect=exception.HostMappingNotFound(
name='dummy'))
def _test(mock_get, _, __):
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
self.assertTrue(mock_get.called)
_test()
def test_uptime_hypervisor_not_mapped(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.HostMappingNotFound(name='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_search(self):
req = self._get_request(True)
result = self.controller.search(req, 'hyper')
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_search_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.search, req,
self.TEST_HYPERS_OBJ[0].id)
def test_search_non_exist(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search,
req, 'a')
self.assertEqual(1, mock_node_search.call_count)
def test_search_unmapped(self):
@mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor')
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host')
def _test(mock_service, mock_search):
mock_search.return_value = [mock.MagicMock()]
mock_service.side_effect = exception.HostMappingNotFound(
name='foo')
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search,
req, 'a')
self.assertTrue(mock_service.called)
_test()
@mock.patch.object(objects.InstanceList, 'get_by_host',
side_effect=fake_instance_get_all_by_host)
def test_servers(self, mock_get):
req = self._get_request(True)
result = self.controller.servers(req, 'hyper')
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS)
expected_dict[0].update({'servers': [
dict(uuid=uuids.instance_1),
dict(uuid=uuids.instance_3)]})
expected_dict[1].update({'servers': [
dict(uuid=uuids.instance_2),
dict(uuid=uuids.instance_4)]})
for output in result['hypervisors']:
servers = output['servers']
for server in servers:
del server['name']
self.assertEqual(dict(hypervisors=expected_dict), result)
def test_servers_not_mapped(self):
req = self._get_request(True)
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host') as m:
m.side_effect = exception.HostMappingNotFound(name='something')
self.assertRaises(exc.HTTPNotFound,
self.controller.servers, req, 'hyper')
def test_servers_non_id(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound,
self.controller.servers,
req, '115')
self.assertEqual(1, mock_node_search.call_count)
def test_servers_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.servers, req,
self.TEST_HYPERS_OBJ[0].id)
def test_servers_with_non_integer_hypervisor_id(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound,
self.controller.servers, req, 'abc')
self.assertEqual(1, mock_node_search.call_count)
def test_servers_with_no_server(self):
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=[]) as mock_inst_get_all:
req = self._get_request(True)
result = self.controller.servers(req, self.TEST_HYPERS_OBJ[0].id)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
self.assertTrue(mock_inst_get_all.called)
def test_statistics(self):
req = self._get_request(True)
result = self.controller.statistics(req)
self.assertEqual(dict(hypervisor_statistics=dict(
count=2,
vcpus=8,
memory_mb=20 * 1024,
local_gb=500,
vcpus_used=4,
memory_mb_used=10 * 1024,
local_gb_used=250,
free_ram_mb=10 * 1024,
free_disk_gb=250,
current_workload=4,
running_vms=4,
disk_available_least=200)), result)
def test_statistics_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.statistics, req)
class HypervisorsTestV228(HypervisorsTestV21):
    """Microversion 2.28: cpu_info is returned as a dict, not a JSON string."""
    api_version = '2.28'
    DETAIL_HYPERS_DICTS = copy.deepcopy(HypervisorsTestV21.DETAIL_HYPERS_DICTS)
    # 2.28 deserializes the cpu_info JSON for the caller.
    DETAIL_HYPERS_DICTS[0]['cpu_info'] = jsonutils.loads(CPU_INFO)
    DETAIL_HYPERS_DICTS[1]['cpu_info'] = jsonutils.loads(CPU_INFO)
    # Empty/None cpu_info now maps to an empty dict instead of echoing back.
    DETAIL_NULL_CPUINFO_DICT = {'': {}, None: {}}
class HypervisorsTestV233(HypervisorsTestV228):
api_version = '2.33'
def test_index_pagination(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=1&marker=1')
result = self.controller.index(req)
expected = {
'hypervisors': [
{'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
],
'hypervisors_links': [
{'href': 'http://localhost/v2/hypervisors?limit=1&marker=2',
'rel': 'next'}
]
}
self.assertEqual(expected, result)
def test_index_pagination_with_invalid_marker(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?marker=99999')
self.assertRaises(exc.HTTPBadRequest,
self.controller.index, req)
def test_index_pagination_with_invalid_non_int_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=-9')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_pagination_with_invalid_string_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors?limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_with_invalid_string_limit(self):
req = self._get_request(
True,
'/v2/1234/os-hypervisors/?limit=1&limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_validation(self):
expected = [{
'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
]
params = {
'limit': 1,
'marker': 1,
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?marker=1&%s=%s&%s=%s' %
(param, value, param, value))
result = self.controller.index(req)
self.assertEqual(expected, result['hypervisors'])
def test_index_pagination_with_additional_filter(self):
expected = {
'hypervisors': [
{'hypervisor_hostname': 'hyper2',
'id': 2,
'state': 'up',
'status': 'enabled'}
],
'hypervisors_links': [
{'href': 'http://localhost/v2/hypervisors?limit=1&marker=2',
'rel': 'next'}
]
}
req = self._get_request(
True, '/v2/1234/os-hypervisors?limit=1&marker=1&additional=3')
result = self.controller.index(req)
self.assertEqual(expected, result)
def test_detail_pagination(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1')
result = self.controller.detail(req)
link = 'http://localhost/v2/hypervisors/detail?limit=1&marker=2'
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_detail_pagination_with_invalid_marker(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors/detail?marker=99999')
self.assertRaises(exc.HTTPBadRequest,
self.controller.detail, req)
def test_detail_pagination_with_invalid_string_limit(self):
req = self._get_request(True,
'/v2/1234/os-hypervisors/detail?limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_with_invalid_string_limit(self):
req = self._get_request(
True,
'/v2/1234/os-hypervisors/detail?limit=1&limit=abc')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_validation(self):
expected = [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
]
params = {
'limit': 1,
'marker': 1,
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?marker=1&%s=%s&%s=%s' %
(param, value, param, value))
result = self.controller.detail(req)
self.assertEqual(expected, result['hypervisors'])
def test_detail_pagination_with_additional_filter(self):
link = 'http://localhost/v2/hypervisors/detail?limit=1&marker=2'
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': 2,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': 2},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
],
'hypervisors_links': [{
'href': link,
'rel': 'next'}]
}
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1&unknown=2')
result = self.controller.detail(req)
self.assertEqual(expected, result)
class HypervisorsTestV252(HypervisorsTestV233):
    """This is a boundary test to make sure 2.52 works like 2.33."""
    # Inherits every test unchanged; only the negotiated microversion differs.
    api_version = '2.52'
class HypervisorsTestV253(HypervisorsTestV252):
api_version = hypervisors_v21.UUID_FOR_ID_MIN_VERSION
expect_uuid_for_id = True
# This is an expected response for index().
INDEX_HYPER_DICTS = [
dict(id=uuids.hyper1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=uuids.hyper2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
def setUp(self):
super(HypervisorsTestV253, self).setUp()
# This is an expected response for detail().
for index, detail_hyper_dict in enumerate(self.DETAIL_HYPERS_DICTS):
detail_hyper_dict['id'] = TEST_HYPERS[index]['uuid']
detail_hyper_dict['service']['id'] = TEST_SERVICES[index].uuid
def test_servers(self):
"""Asserts that calling the servers route after 2.52 fails."""
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.servers,
self._get_request(True), 'hyper')
def test_servers_with_no_server(self):
"""Tests GET /os-hypervisors?with_servers=1 when there are no
instances on the given host.
"""
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=[]) as mock_inst_get_all:
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=1')
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
# instance_get_all_by_host is called for each hypervisor
self.assertEqual(2, mock_inst_get_all.call_count)
mock_inst_get_all.assert_has_calls((
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))
def test_servers_not_mapped(self):
"""Tests that instance_get_all_by_host fails with HostMappingNotFound.
"""
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=1')
with mock.patch.object(
self.controller.host_api, 'instance_get_all_by_host',
side_effect=exception.HostMappingNotFound(name='something')):
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=[]), result)
def test_list_with_servers(self):
"""Tests GET /os-hypervisors?with_servers=True"""
instances = [
objects.InstanceList(objects=[objects.Instance(
id=1, uuid=uuids.hyper1_instance1)]),
objects.InstanceList(objects=[objects.Instance(
id=2, uuid=uuids.hyper2_instance1)])]
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
side_effect=instances) as mock_inst_get_all:
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=True')
result = self.controller.index(req)
index_with_servers = copy.deepcopy(self.INDEX_HYPER_DICTS)
index_with_servers[0]['servers'] = [
{'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
index_with_servers[1]['servers'] = [
{'name': 'instance-00000002', 'uuid': uuids.hyper2_instance1}]
self.assertEqual(dict(hypervisors=index_with_servers), result)
# instance_get_all_by_host is called for each hypervisor
self.assertEqual(2, mock_inst_get_all.call_count)
mock_inst_get_all.assert_has_calls((
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))
def test_list_with_servers_invalid_parameter(self):
"""Tests using an invalid with_servers query parameter."""
req = self._get_request(use_admin_context=True,
url='/os-hypervisors?with_servers=invalid')
self.assertRaises(
exception.ValidationError, self.controller.index, req)
def test_list_with_hostname_pattern_and_paging_parameters(self):
"""This is a negative test to validate that trying to list hypervisors
with a hostname pattern and paging parameters results in a 400 error.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=foo&'
'limit=1&marker=%s' % uuids.marker)
ex = self.assertRaises(exc.HTTPBadRequest, self.controller.index, req)
self.assertIn('Paging over hypervisors with the '
'hypervisor_hostname_pattern query parameter is not '
'supported.', six.text_type(ex))
def test_servers_with_non_integer_hypervisor_id(self):
"""This is a poorly named test, it's really checking the 404 case where
there is no match for the hostname pattern.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?with_servers=yes&'
'hypervisor_hostname_pattern=shenzhen')
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=objects.ComputeNodeList()) as s:
self.assertRaises(exc.HTTPNotFound, self.controller.index, req)
s.assert_called_once_with(req.environ['nova.context'], 'shenzhen')
def test_servers_non_admin(self):
"""There is no reason to test this for 2.53 since the
/os-hypervisors/servers route is deprecated.
"""
pass
def test_servers_non_id(self):
"""There is no reason to test this for 2.53 since the
/os-hypervisors/servers route is deprecated.
"""
pass
def test_search_old_route(self):
"""Asserts that calling the search route after 2.52 fails."""
self.assertRaises(exception.VersionNotFoundForAPIMethod,
self.controller.search,
self._get_request(True), 'hyper')
def test_search(self):
"""Test listing hypervisors with details and using the
hypervisor_hostname_pattern query string.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=shenzhen')
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=objects.ComputeNodeList(
objects=[TEST_HYPERS_OBJ[0]])) as s:
result = self.controller.detail(req)
s.assert_called_once_with(req.environ['nova.context'], 'shenzhen')
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('1.1.1.1'),
'hypervisor_hostname': 'hyper1',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': TEST_HYPERS_OBJ[0].uuid,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute1',
'id': TEST_SERVICES[0].uuid},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
]
}
# There are no links when using the hypervisor_hostname_pattern
# query string since we can't page using a pattern matcher.
self.assertNotIn('hypervisors_links', result)
self.assertDictEqual(expected, result)
def test_search_invalid_hostname_pattern_parameter(self):
"""Tests passing an invalid hypervisor_hostname_pattern query
parameter.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?hypervisor_hostname_pattern=invalid~host')
self.assertRaises(
exception.ValidationError, self.controller.detail, req)
def test_search_non_exist(self):
"""This is a duplicate of test_servers_with_non_integer_hypervisor_id.
"""
pass
def test_search_non_admin(self):
"""There is no reason to test this for 2.53 since the
/os-hypervisors/search route is deprecated.
"""
pass
def test_search_unmapped(self):
"""This is already tested with test_index_compute_host_not_mapped."""
pass
def test_show_non_integer_id(self):
"""There is no reason to test this for 2.53 since 2.53 requires a
non-integer id (requires a uuid).
"""
pass
def test_show_integer_id(self):
"""Tests that we get a 400 if passed a hypervisor integer id to show().
"""
req = self._get_request(True)
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.show, req, '1')
self.assertIn('Invalid uuid 1', six.text_type(ex))
def test_show_with_servers_invalid_parameter(self):
"""Tests passing an invalid value for the with_servers query parameter
to the show() method to make sure the query parameter is validated.
"""
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=invalid' % hyper_id)
ex = self.assertRaises(
exception.ValidationError, self.controller.show, req, hyper_id)
self.assertIn('with_servers', six.text_type(ex))
def test_show_with_servers_host_mapping_not_found(self):
"""Tests that a 404 is returned if instance_get_all_by_host raises
HostMappingNotFound.
"""
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=true' % hyper_id)
with mock.patch.object(
self.controller.host_api, 'instance_get_all_by_host',
side_effect=exception.HostMappingNotFound(name=hyper_id)):
self.assertRaises(exc.HTTPNotFound, self.controller.show,
req, hyper_id)
def test_show_with_servers(self):
"""Tests the show() result when servers are included in the output."""
instances = objects.InstanceList(objects=[objects.Instance(
id=1, uuid=uuids.hyper1_instance1)])
hyper_id = self._get_hyper_id()
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=on' % hyper_id)
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=instances) as mock_inst_get_all:
result = self.controller.show(req, hyper_id)
show_with_servers = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
show_with_servers['servers'] = [
{'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
self.assertDictEqual(dict(hypervisor=show_with_servers), result)
# instance_get_all_by_host is called
mock_inst_get_all.assert_called_once_with(
req.environ['nova.context'], TEST_HYPERS_OBJ[0].host)
def test_uptime_non_integer_id(self):
"""There is no reason to test this for 2.53 since 2.53 requires a
non-integer id (requires a uuid).
"""
pass
def test_uptime_integer_id(self):
"""Tests that we get a 400 if passed a hypervisor integer id to
uptime().
"""
req = self._get_request(True)
ex = self.assertRaises(exc.HTTPBadRequest,
self.controller.uptime, req, '1')
self.assertIn('Invalid uuid 1', six.text_type(ex))
def test_detail_pagination(self):
"""Tests details paging with uuid markers."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?limit=1&marker=%s' %
TEST_HYPERS_OBJ[0].uuid)
result = self.controller.detail(req)
link = ('http://localhost/v2/hypervisors/detail?limit=1&marker=%s' %
TEST_HYPERS_OBJ[1].uuid)
expected = {
'hypervisors': [
{'cpu_info': {'arch': 'x86_64',
'features': [],
'model': '',
'topology': {'cores': 1,
'sockets': 1,
'threads': 1},
'vendor': 'fake'},
'current_workload': 2,
'disk_available_least': 100,
'free_disk_gb': 125,
'free_ram_mb': 5120,
'host_ip': netaddr.IPAddress('2.2.2.2'),
'hypervisor_hostname': 'hyper2',
'hypervisor_type': 'xen',
'hypervisor_version': 3,
'id': TEST_HYPERS_OBJ[1].uuid,
'local_gb': 250,
'local_gb_used': 125,
'memory_mb': 10240,
'memory_mb_used': 5120,
'running_vms': 2,
'service': {'disabled_reason': None,
'host': 'compute2',
'id': TEST_SERVICES[1].uuid},
'state': 'up',
'status': 'enabled',
'vcpus': 4,
'vcpus_used': 2}
],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_detail_pagination_with_invalid_marker(self):
"""Tests detail paging with an invalid marker (not found)."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?marker=%s' % uuids.invalid_marker)
self.assertRaises(exc.HTTPBadRequest,
self.controller.detail, req)
def test_detail_pagination_with_additional_filter(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/detail?limit=1&marker=9&unknown=2')
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_detail_duplicate_query_parameters_validation(self):
"""Tests that the list Detail query parameter schema enforces only a
single entry for any query parameter.
"""
params = {
'limit': 1,
'marker': uuids.marker,
'hypervisor_hostname_pattern': 'foo',
'with_servers': 'true'
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/detail?%s=%s&%s=%s' %
(param, value, param, value))
self.assertRaises(exception.ValidationError,
self.controller.detail, req)
def test_index_pagination(self):
"""Tests index paging with uuid markers."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?limit=1&marker=%s' %
TEST_HYPERS_OBJ[0].uuid)
result = self.controller.index(req)
link = ('http://localhost/v2/hypervisors?limit=1&marker=%s' %
TEST_HYPERS_OBJ[1].uuid)
expected = {
'hypervisors': [{
'hypervisor_hostname': 'hyper2',
'id': TEST_HYPERS_OBJ[1].uuid,
'state': 'up',
'status': 'enabled'
}],
'hypervisors_links': [{'href': link, 'rel': 'next'}]
}
self.assertEqual(expected, result)
def test_index_pagination_with_invalid_marker(self):
"""Tests index paging with an invalid marker (not found)."""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?marker=%s' % uuids.invalid_marker)
self.assertRaises(exc.HTTPBadRequest,
self.controller.index, req)
def test_index_pagination_with_additional_filter(self):
req = self._get_request(
True, '/v2/1234/os-hypervisors/?limit=1&marker=9&unknown=2')
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_duplicate_query_parameters_validation(self):
"""Tests that the list query parameter schema enforces only a single
entry for any query parameter.
"""
params = {
'limit': 1,
'marker': uuids.marker,
'hypervisor_hostname_pattern': 'foo',
'with_servers': 'true'
}
for param, value in params.items():
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors?%s=%s&%s=%s' %
(param, value, param, value))
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_show_duplicate_query_parameters_validation(self):
"""Tests that the show query parameter schema enforces only a single
entry for any query parameter.
"""
req = self._get_request(
use_admin_context=True,
url='/os-hypervisors/%s?with_servers=1&with_servers=1' %
uuids.hyper1)
self.assertRaises(exception.ValidationError,
self.controller.show, req, uuids.hyper1)
| 41.231655
| 79
| 0.570858
|
import copy
import mock
import netaddr
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel as uuids
import six
from webob import exc
from nova.api.openstack.compute import hypervisors \
as hypervisors_v21
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
CPU_INFO = """
{"arch": "x86_64",
"vendor": "fake",
"topology": {"cores": 1, "threads": 1, "sockets": 1},
"features": [],
"model": ""}"""
TEST_HYPERS = [
dict(id=1,
uuid=uuids.hyper1,
service_id=1,
host="compute1",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper1",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('1.1.1.1')),
dict(id=2,
uuid=uuids.hyper2,
service_id=2,
host="compute2",
vcpus=4,
memory_mb=10 * 1024,
local_gb=250,
vcpus_used=2,
memory_mb_used=5 * 1024,
local_gb_used=125,
hypervisor_type="xen",
hypervisor_version=3,
hypervisor_hostname="hyper2",
free_ram_mb=5 * 1024,
free_disk_gb=125,
current_workload=2,
running_vms=2,
cpu_info=CPU_INFO,
disk_available_least=100,
host_ip=netaddr.IPAddress('2.2.2.2'))]
TEST_SERVICES = [
objects.Service(id=1,
uuid=uuids.service1,
host="compute1",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
objects.Service(id=2,
uuid=uuids.service2,
host="compute2",
binary="nova-compute",
topic="compute_topic",
report_count=5,
disabled=False,
disabled_reason=None,
availability_zone="nova"),
]
TEST_HYPERS_OBJ = [objects.ComputeNode(**hyper_dct)
for hyper_dct in TEST_HYPERS]
TEST_HYPERS[0].update({'service': TEST_SERVICES[0]})
TEST_HYPERS[1].update({'service': TEST_SERVICES[1]})
TEST_SERVERS = [dict(name="inst1", uuid=uuids.instance_1, host="compute1"),
dict(name="inst2", uuid=uuids.instance_2, host="compute2"),
dict(name="inst3", uuid=uuids.instance_3, host="compute1"),
dict(name="inst4", uuid=uuids.instance_4, host="compute2")]
def fake_compute_node_get_all(context, limit=None, marker=None):
    """Fake for HostAPI.compute_node_get_all with paging support.

    :param context: request context (ignored)
    :param limit: optional maximum number of nodes to return; may be an
        int or a numeric string (coerced with ``int()``)
    :param marker: optional marker identifying the last node already seen;
        matched against each node's integer id (as a string) or uuid
    :returns: list of ComputeNode objects from TEST_HYPERS_OBJ that come
        after the marker, capped at ``limit``
    :raises MarkerNotFound: for the well-known invalid markers used by
        the paging tests
    """
    if marker in ['99999', uuids.invalid_marker]:
        raise exception.MarkerNotFound(marker)
    # Start emitting immediately when no marker was given; otherwise skip
    # nodes until the marker is passed.  (Idiomatic form of the original
    # "True if marker is None else False".)
    marker_found = marker is None
    output = []
    for hyper in TEST_HYPERS_OBJ:
        if not marker_found and marker in (str(hyper.id), hyper.uuid):
            marker_found = True
        elif marker_found:
            if limit is None or len(output) < int(limit):
                output.append(hyper)
    # NOTE(review): a marker that never matches yields an empty list here
    # (the real API would raise MarkerNotFound); the tests only pass
    # known-good or explicitly invalid markers, so this is sufficient.
    return output
def fake_compute_node_search_by_hypervisor(context, hypervisor_re):
    # Fake for HostAPI.compute_node_search_by_hypervisor: ignores the
    # hostname pattern and always "matches" every fake hypervisor.
    return TEST_HYPERS_OBJ
def fake_compute_node_get(context, compute_id):
    """Return the fake compute node whose uuid or integer id matches.

    Raises ComputeHostNotFound when no node matches.  Note the uuid is
    compared first, so ``int(compute_id)`` is only attempted when the
    uuid comparison fails (mirroring the original short-circuit order).
    """
    matches = (node for node in TEST_HYPERS_OBJ
               if node.uuid == compute_id or node.id == int(compute_id))
    found = next(matches, None)
    if found is None:
        raise exception.ComputeHostNotFound(host=compute_id)
    return found
def fake_service_get_by_compute_host(context, host):
    """Return the fake nova-compute service for *host*, or None if unknown."""
    return next(
        (service for service in TEST_SERVICES if service.host == host),
        None)
def fake_compute_node_statistics(context):
    """Fake for nova.db.api.compute_node_statistics.

    Sums each resource column across all fake hypervisors and counts
    the nodes, matching what the real statistics query returns.
    """
    summed_fields = (
        'vcpus', 'memory_mb', 'local_gb', 'vcpus_used', 'memory_mb_used',
        'local_gb_used', 'free_ram_mb', 'free_disk_gb', 'current_workload',
        'running_vms', 'disk_available_least',
    )
    stats = {field: sum(getattr(hyper, field) for hyper in TEST_HYPERS_OBJ)
             for field in summed_fields}
    stats['count'] = len(TEST_HYPERS_OBJ)
    return stats
def fake_instance_get_all_by_host(context, host):
    """Return fake Instance objects for the TEST_SERVERS hosted on *host*."""
    return [fake_instance.fake_instance_obj(context, **server)
            for server in TEST_SERVERS if server['host'] == host]
class HypervisorsTestV21(test.NoDBTestCase):
api_version = '2.1'
expect_uuid_for_id = False
TEST_HYPERS_OBJ = copy.deepcopy(TEST_HYPERS_OBJ)
TEST_SERVICES = copy.deepcopy(TEST_SERVICES)
TEST_SERVERS = copy.deepcopy(TEST_SERVERS)
DETAIL_HYPERS_DICTS = copy.deepcopy(TEST_HYPERS)
del DETAIL_HYPERS_DICTS[0]['service_id']
del DETAIL_HYPERS_DICTS[1]['service_id']
del DETAIL_HYPERS_DICTS[0]['host']
del DETAIL_HYPERS_DICTS[1]['host']
del DETAIL_HYPERS_DICTS[0]['uuid']
del DETAIL_HYPERS_DICTS[1]['uuid']
DETAIL_HYPERS_DICTS[0].update({'state': 'up',
'status': 'enabled',
'service': dict(id=1, host='compute1',
disabled_reason=None)})
DETAIL_HYPERS_DICTS[1].update({'state': 'up',
'status': 'enabled',
'service': dict(id=2, host='compute2',
disabled_reason=None)})
INDEX_HYPER_DICTS = [
dict(id=1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
DETAIL_NULL_CPUINFO_DICT = {'': '', None: None}
    def _get_request(self, use_admin_context, url=''):
        # Build a fake request pinned to this test class's microversion so
        # subclasses only have to override api_version.
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.api_version)
    def _set_up_controller(self):
        # Build the controller under test and force the servicegroup API to
        # report every service as "up" so hypervisor state is predictable.
        self.controller = hypervisors_v21.HypervisorsController()
        self.controller.servicegroup_api.service_is_up = mock.MagicMock(
            return_value=True)
def _get_hyper_id(self):
return (self.TEST_HYPERS_OBJ[0].uuid if self.expect_uuid_for_id
else self.TEST_HYPERS_OBJ[0].id)
def setUp(self):
super(HypervisorsTestV21, self).setUp()
self._set_up_controller()
self.rule_hyp_show = "os_compute_api:os-hypervisors"
host_api = self.controller.host_api
host_api.compute_node_get_all = mock.MagicMock(
side_effect=fake_compute_node_get_all)
host_api.service_get_by_compute_host = mock.MagicMock(
side_effect=fake_service_get_by_compute_host)
host_api.compute_node_search_by_hypervisor = mock.MagicMock(
side_effect=fake_compute_node_search_by_hypervisor)
host_api.compute_node_get = mock.MagicMock(
side_effect=fake_compute_node_get)
self.stub_out('nova.db.api.compute_node_statistics',
fake_compute_node_statistics)
def test_view_hypervisor_nodetail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], False, req)
self.assertEqual(self.INDEX_HYPER_DICTS[0], result)
def test_view_hypervisor_detail_noservers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], True, req)
self.assertEqual(self.DETAIL_HYPERS_DICTS[0], result)
def test_view_hypervisor_servers(self):
req = self._get_request(True)
result = self.controller._view_hypervisor(self.TEST_HYPERS_OBJ[0],
self.TEST_SERVICES[0],
False, req,
self.TEST_SERVERS)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'servers': [
dict(name="inst1", uuid=uuids.instance_1),
dict(name="inst2", uuid=uuids.instance_2),
dict(name="inst3", uuid=uuids.instance_3),
dict(name="inst4", uuid=uuids.instance_4)]})
self.assertEqual(expected_dict, result)
def _test_view_hypervisor_detail_cpuinfo_null(self, cpu_info):
req = self._get_request(True)
test_hypervisor_obj = copy.deepcopy(self.TEST_HYPERS_OBJ[0])
test_hypervisor_obj.cpu_info = cpu_info
result = self.controller._view_hypervisor(test_hypervisor_obj,
self.TEST_SERVICES[0],
True, req)
expected_dict = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
expected_dict.update({'cpu_info':
self.DETAIL_NULL_CPUINFO_DICT[cpu_info]})
self.assertEqual(result, expected_dict)
def test_view_hypervisor_detail_cpuinfo_empty_string(self):
self._test_view_hypervisor_detail_cpuinfo_null('')
def test_view_hypervisor_detail_cpuinfo_none(self):
self._test_view_hypervisor_detail_cpuinfo_null(None)
def test_index(self):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_index_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_index_compute_host_not_found(self):
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
_test(self)
def test_index_compute_host_not_mapped(self):
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].uuid if self.expect_uuid_for_id
else compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
self.assertDictEqual(expected, result['hypervisors'][0])
_test(self)
def test_detail(self):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(dict(hypervisors=self.DETAIL_HYPERS_DICTS), result)
def test_detail_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.detail, req)
def test_detail_compute_host_not_found(self):
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.ComputeHostNotFound(host=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# the subset we care about and there are more keys than what index
# would return
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
_test(self)
def test_detail_compute_host_not_mapped(self):
# two computes, a matching service only exists for the first one
compute_nodes = objects.ComputeNodeList(objects=[
objects.ComputeNode(**TEST_HYPERS[0]),
objects.ComputeNode(**TEST_HYPERS[1])
])
def fake_service_get_by_compute_host(context, host):
if host == TEST_HYPERS[0]['host']:
return TEST_SERVICES[0]
raise exception.HostMappingNotFound(name=host)
@mock.patch.object(self.controller.host_api, 'compute_node_get_all',
return_value=compute_nodes)
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
fake_service_get_by_compute_host)
def _test(self, compute_node_get_all):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(1, len(result['hypervisors']))
expected = {
'id': compute_nodes[0].id,
'hypervisor_hostname': compute_nodes[0].hypervisor_hostname,
'state': 'up',
'status': 'enabled',
}
# we don't care about all of the details, just make sure we get
hypervisor = result['hypervisors'][0]
self.assertTrue(
set(expected.keys()).issubset(set(hypervisor.keys())))
self.assertGreater(len(hypervisor.keys()), len(expected.keys()))
self.assertEqual(compute_nodes[0].hypervisor_hostname,
hypervisor['hypervisor_hostname'])
_test(self)
def test_show_compute_host_not_mapped(self):
@mock.patch.object(self.controller.host_api, 'compute_node_get',
return_value=self.TEST_HYPERS_OBJ[0])
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host')
def _test(self, mock_service, mock_compute_node_get):
req = self._get_request(True)
mock_service.side_effect = exception.HostMappingNotFound(
name='foo')
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound, self.controller.show,
req, hyper_id)
self.assertTrue(mock_service.called)
mock_compute_node_get.assert_called_once_with(mock.ANY, hyper_id)
_test(self)
def test_show_noid(self):
req = self._get_request(True)
hyperid = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, hyperid)
def test_show_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, 'abc')
def test_show_withid(self):
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.show(req, hyper_id)
self.assertEqual(dict(hypervisor=self.DETAIL_HYPERS_DICTS[0]), result)
def test_show_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, req,
self._get_hyper_id())
def test_uptime_noid(self):
req = self._get_request(True)
hyper_id = uuids.hyper3 if self.expect_uuid_for_id else '3'
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req,
hyper_id)
def test_uptime_notimplemented(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exc.HTTPNotImplemented()
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotImplemented,
self.controller.uptime, req, hyper_id)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_implemented(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
return_value="fake uptime"
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
result = self.controller.uptime(req, hyper_id)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'uptime': "fake uptime"})
self.assertEqual(dict(hypervisor=expected_dict), result)
self.assertEqual(1, mock_get_uptime.call_count)
def test_uptime_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req, 'abc')
def test_uptime_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.uptime, req,
self.TEST_HYPERS_OBJ[0].id)
def test_uptime_hypervisor_down(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.ComputeServiceUnavailable(host='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPBadRequest,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_uptime_hypervisor_not_mapped_service_get(self):
@mock.patch.object(self.controller.host_api, 'compute_node_get')
@mock.patch.object(self.controller.host_api, 'get_host_uptime')
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host',
side_effect=exception.HostMappingNotFound(
name='dummy'))
def _test(mock_get, _, __):
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
self.assertTrue(mock_get.called)
_test()
def test_uptime_hypervisor_not_mapped(self):
with mock.patch.object(self.controller.host_api, 'get_host_uptime',
side_effect=exception.HostMappingNotFound(name='dummy')
) as mock_get_uptime:
req = self._get_request(True)
hyper_id = self._get_hyper_id()
self.assertRaises(exc.HTTPNotFound,
self.controller.uptime, req, hyper_id)
mock_get_uptime.assert_called_once_with(
mock.ANY, self.TEST_HYPERS_OBJ[0].host)
def test_search(self):
req = self._get_request(True)
result = self.controller.search(req, 'hyper')
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
def test_search_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.search, req,
self.TEST_HYPERS_OBJ[0].id)
def test_search_non_exist(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search,
req, 'a')
self.assertEqual(1, mock_node_search.call_count)
def test_search_unmapped(self):
@mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor')
@mock.patch.object(self.controller.host_api,
'service_get_by_compute_host')
def _test(mock_service, mock_search):
mock_search.return_value = [mock.MagicMock()]
mock_service.side_effect = exception.HostMappingNotFound(
name='foo')
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.search,
req, 'a')
self.assertTrue(mock_service.called)
_test()
@mock.patch.object(objects.InstanceList, 'get_by_host',
side_effect=fake_instance_get_all_by_host)
def test_servers(self, mock_get):
req = self._get_request(True)
result = self.controller.servers(req, 'hyper')
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS)
expected_dict[0].update({'servers': [
dict(uuid=uuids.instance_1),
dict(uuid=uuids.instance_3)]})
expected_dict[1].update({'servers': [
dict(uuid=uuids.instance_2),
dict(uuid=uuids.instance_4)]})
for output in result['hypervisors']:
servers = output['servers']
for server in servers:
del server['name']
self.assertEqual(dict(hypervisors=expected_dict), result)
def test_servers_not_mapped(self):
req = self._get_request(True)
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host') as m:
m.side_effect = exception.HostMappingNotFound(name='something')
self.assertRaises(exc.HTTPNotFound,
self.controller.servers, req, 'hyper')
def test_servers_non_id(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound,
self.controller.servers,
req, '115')
self.assertEqual(1, mock_node_search.call_count)
def test_servers_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.servers, req,
self.TEST_HYPERS_OBJ[0].id)
def test_servers_with_non_integer_hypervisor_id(self):
with mock.patch.object(self.controller.host_api,
'compute_node_search_by_hypervisor',
return_value=[]) as mock_node_search:
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound,
self.controller.servers, req, 'abc')
self.assertEqual(1, mock_node_search.call_count)
def test_servers_with_no_server(self):
with mock.patch.object(self.controller.host_api,
'instance_get_all_by_host',
return_value=[]) as mock_inst_get_all:
req = self._get_request(True)
result = self.controller.servers(req, self.TEST_HYPERS_OBJ[0].id)
self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
self.assertTrue(mock_inst_get_all.called)
def test_statistics(self):
req = self._get_request(True)
result = self.controller.statistics(req)
self.assertEqual(dict(hypervisor_statistics=dict(
count=2,
vcpus=8,
memory_mb=20 * 1024,
local_gb=500,
vcpus_used=4,
memory_mb_used=10 * 1024,
local_gb_used=250,
free_ram_mb=10 * 1024,
free_disk_gb=250,
current_workload=4,
running_vms=4,
disk_available_least=200)), result)
def test_statistics_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.statistics, req)
class HypervisorsTestV228(HypervisorsTestV21):
    """Microversion 2.28: ``cpu_info`` is returned as a parsed JSON object
    rather than the raw string used by earlier microversions."""
    api_version = '2.28'

    # Deep-copy the 2.1 expectations so the parent's dicts are not mutated,
    # then swap the cpu_info strings for their decoded JSON form.
    DETAIL_HYPERS_DICTS = copy.deepcopy(HypervisorsTestV21.DETAIL_HYPERS_DICTS)
    DETAIL_HYPERS_DICTS[0]['cpu_info'] = jsonutils.loads(CPU_INFO)
    DETAIL_HYPERS_DICTS[1]['cpu_info'] = jsonutils.loads(CPU_INFO)
    # Empty/None cpu_info both serialize to an empty object at >= 2.28.
    DETAIL_NULL_CPUINFO_DICT = {'': {}, None: {}}
class HypervisorsTestV233(HypervisorsTestV228):
    """Microversion 2.33: paging (``limit``/``marker``) over the hypervisor
    index and detail listings, including a ``hypervisors_links`` next link."""
    api_version = '2.33'

    def test_index_pagination(self):
        """index() honours limit/marker and emits a ``next`` link."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors?limit=1&marker=1')
        result = self.controller.index(req)
        expected = {
            'hypervisors': [
                {'hypervisor_hostname': 'hyper2',
                 'id': 2,
                 'state': 'up',
                 'status': 'enabled'}
            ],
            'hypervisors_links': [
                {'href': 'http://localhost/v2/hypervisors?limit=1&marker=2',
                 'rel': 'next'}
            ]
        }
        self.assertEqual(expected, result)

    def test_index_pagination_with_invalid_marker(self):
        """An unknown marker id yields HTTP 400."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors?marker=99999')
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.index, req)

    def test_index_pagination_with_invalid_non_int_limit(self):
        """A negative limit fails schema validation."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors?limit=-9')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)

    def test_index_pagination_with_invalid_string_limit(self):
        """A non-numeric limit fails schema validation."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors?limit=abc')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)

    def test_index_duplicate_query_parameters_with_invalid_string_limit(self):
        """A duplicated limit where one value is invalid fails validation."""
        req = self._get_request(
            True,
            '/v2/1234/os-hypervisors/?limit=1&limit=abc')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)

    def test_index_duplicate_query_parameters_validation(self):
        """Duplicated but valid paging parameters are tolerated by index()."""
        expected = [{
            'hypervisor_hostname': 'hyper2',
            'id': 2,
            'state': 'up',
            'status': 'enabled'}
        ]
        params = {
            'limit': 1,
            'marker': 1,
        }
        for param, value in params.items():
            req = self._get_request(
                use_admin_context=True,
                url='/os-hypervisors?marker=1&%s=%s&%s=%s' %
                    (param, value, param, value))
            result = self.controller.index(req)
            self.assertEqual(expected, result['hypervisors'])

    def test_index_pagination_with_additional_filter(self):
        """Unknown extra query parameters are ignored by index() at 2.33."""
        expected = {
            'hypervisors': [
                {'hypervisor_hostname': 'hyper2',
                 'id': 2,
                 'state': 'up',
                 'status': 'enabled'}
            ],
            'hypervisors_links': [
                {'href': 'http://localhost/v2/hypervisors?limit=1&marker=2',
                 'rel': 'next'}
            ]
        }
        req = self._get_request(
            True, '/v2/1234/os-hypervisors?limit=1&marker=1&additional=3')
        result = self.controller.index(req)
        self.assertEqual(expected, result)

    def test_detail_pagination(self):
        """detail() pages with limit/marker and emits a ``next`` link."""
        req = self._get_request(
            True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1')
        result = self.controller.detail(req)
        link = 'http://localhost/v2/hypervisors/detail?limit=1&marker=2'
        expected = {
            'hypervisors': [
                {'cpu_info': {'arch': 'x86_64',
                              'features': [],
                              'model': '',
                              'topology': {'cores': 1,
                                           'sockets': 1,
                                           'threads': 1},
                              'vendor': 'fake'},
                 'current_workload': 2,
                 'disk_available_least': 100,
                 'free_disk_gb': 125,
                 'free_ram_mb': 5120,
                 'host_ip': netaddr.IPAddress('2.2.2.2'),
                 'hypervisor_hostname': 'hyper2',
                 'hypervisor_type': 'xen',
                 'hypervisor_version': 3,
                 'id': 2,
                 'local_gb': 250,
                 'local_gb_used': 125,
                 'memory_mb': 10240,
                 'memory_mb_used': 5120,
                 'running_vms': 2,
                 'service': {'disabled_reason': None,
                             'host': 'compute2',
                             'id': 2},
                 'state': 'up',
                 'status': 'enabled',
                 'vcpus': 4,
                 'vcpus_used': 2}
            ],
            'hypervisors_links': [{'href': link, 'rel': 'next'}]
        }
        self.assertEqual(expected, result)

    def test_detail_pagination_with_invalid_marker(self):
        """An unknown marker id yields HTTP 400 on detail()."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors/detail?marker=99999')
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.detail, req)

    def test_detail_pagination_with_invalid_string_limit(self):
        """A non-numeric limit fails schema validation on detail()."""
        req = self._get_request(True,
                                '/v2/1234/os-hypervisors/detail?limit=abc')
        self.assertRaises(exception.ValidationError,
                          self.controller.detail, req)

    def test_detail_duplicate_query_parameters_with_invalid_string_limit(self):
        """A duplicated limit with one invalid value fails on detail()."""
        req = self._get_request(
            True,
            '/v2/1234/os-hypervisors/detail?limit=1&limit=abc')
        self.assertRaises(exception.ValidationError,
                          self.controller.detail, req)

    def test_detail_duplicate_query_parameters_validation(self):
        """Duplicated but valid paging parameters are tolerated by detail()."""
        expected = [
            {'cpu_info': {'arch': 'x86_64',
                          'features': [],
                          'model': '',
                          'topology': {'cores': 1,
                                       'sockets': 1,
                                       'threads': 1},
                          'vendor': 'fake'},
             'current_workload': 2,
             'disk_available_least': 100,
             'free_disk_gb': 125,
             'free_ram_mb': 5120,
             'host_ip': netaddr.IPAddress('2.2.2.2'),
             'hypervisor_hostname': 'hyper2',
             'hypervisor_type': 'xen',
             'hypervisor_version': 3,
             'id': 2,
             'local_gb': 250,
             'local_gb_used': 125,
             'memory_mb': 10240,
             'memory_mb_used': 5120,
             'running_vms': 2,
             'service': {'disabled_reason': None,
                         'host': 'compute2',
                         'id': 2},
             'state': 'up',
             'status': 'enabled',
             'vcpus': 4,
             'vcpus_used': 2}
        ]
        params = {
            'limit': 1,
            'marker': 1,
        }
        for param, value in params.items():
            req = self._get_request(
                use_admin_context=True,
                url='/os-hypervisors/detail?marker=1&%s=%s&%s=%s' %
                    (param, value, param, value))
            result = self.controller.detail(req)
            self.assertEqual(expected, result['hypervisors'])

    def test_detail_pagination_with_additional_filter(self):
        """Unknown extra query parameters are ignored by detail() at 2.33."""
        link = 'http://localhost/v2/hypervisors/detail?limit=1&marker=2'
        expected = {
            'hypervisors': [
                {'cpu_info': {'arch': 'x86_64',
                              'features': [],
                              'model': '',
                              'topology': {'cores': 1,
                                           'sockets': 1,
                                           'threads': 1},
                              'vendor': 'fake'},
                 'current_workload': 2,
                 'disk_available_least': 100,
                 'free_disk_gb': 125,
                 'free_ram_mb': 5120,
                 'host_ip': netaddr.IPAddress('2.2.2.2'),
                 'hypervisor_hostname': 'hyper2',
                 'hypervisor_type': 'xen',
                 'hypervisor_version': 3,
                 'id': 2,
                 'local_gb': 250,
                 'local_gb_used': 125,
                 'memory_mb': 10240,
                 'memory_mb_used': 5120,
                 'running_vms': 2,
                 'service': {'disabled_reason': None,
                             'host': 'compute2',
                             'id': 2},
                 'state': 'up',
                 'status': 'enabled',
                 'vcpus': 4,
                 'vcpus_used': 2}
            ],
            'hypervisors_links': [{
                'href': link,
                'rel': 'next'}]
        }
        req = self._get_request(
            True, '/v2/1234/os-hypervisors/detail?limit=1&marker=1&unknown=2')
        result = self.controller.detail(req)
        self.assertEqual(expected, result)
class HypervisorsTestV252(HypervisorsTestV233):
    """Microversion 2.52 variant; body only bumps the version, so all test
    behavior is inherited from the 2.33 tests."""
    api_version = '2.52'
class HypervisorsTestV253(HypervisorsTestV252):
    """Microversion 2.53: hypervisor and service ids become UUIDs, the
    separate servers/search routes are gone (replaced by the
    ``with_servers`` and ``hypervisor_hostname_pattern`` query parameters),
    and paging markers are UUIDs."""
    api_version = hypervisors_v21.UUID_FOR_ID_MIN_VERSION
    expect_uuid_for_id = True

    # Index entries now carry UUID ids instead of integers.
    INDEX_HYPER_DICTS = [
        dict(id=uuids.hyper1, hypervisor_hostname="hyper1",
             state='up', status='enabled'),
        dict(id=uuids.hyper2, hypervisor_hostname="hyper2",
             state='up', status='enabled')]

    def setUp(self):
        """Rewrite inherited detail expectations to use UUID ids."""
        super(HypervisorsTestV253, self).setUp()
        for index, detail_hyper_dict in enumerate(self.DETAIL_HYPERS_DICTS):
            detail_hyper_dict['id'] = TEST_HYPERS[index]['uuid']
            detail_hyper_dict['service']['id'] = TEST_SERVICES[index].uuid

    def test_servers(self):
        """The standalone servers route no longer exists at >= 2.53."""
        self.assertRaises(exception.VersionNotFoundForAPIMethod,
                          self.controller.servers,
                          self._get_request(True), 'hyper')

    def test_servers_with_no_server(self):
        """with_servers=1 on index() tolerates hosts with no instances."""
        with mock.patch.object(self.controller.host_api,
                               'instance_get_all_by_host',
                               return_value=[]) as mock_inst_get_all:
            req = self._get_request(use_admin_context=True,
                                    url='/os-hypervisors?with_servers=1')
            result = self.controller.index(req)
        self.assertEqual(dict(hypervisors=self.INDEX_HYPER_DICTS), result)
        self.assertEqual(2, mock_inst_get_all.call_count)
        mock_inst_get_all.assert_has_calls((
            mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
            mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))

    def test_servers_not_mapped(self):
        """Unmapped hosts are simply omitted from the with_servers index."""
        req = self._get_request(use_admin_context=True,
                                url='/os-hypervisors?with_servers=1')
        with mock.patch.object(
                self.controller.host_api, 'instance_get_all_by_host',
                side_effect=exception.HostMappingNotFound(name='something')):
            result = self.controller.index(req)
            self.assertEqual(dict(hypervisors=[]), result)

    def test_list_with_servers(self):
        """with_servers=True attaches per-host server name/uuid lists."""
        instances = [
            objects.InstanceList(objects=[objects.Instance(
                id=1, uuid=uuids.hyper1_instance1)]),
            objects.InstanceList(objects=[objects.Instance(
                id=2, uuid=uuids.hyper2_instance1)])]
        with mock.patch.object(self.controller.host_api,
                               'instance_get_all_by_host',
                               side_effect=instances) as mock_inst_get_all:
            req = self._get_request(use_admin_context=True,
                                    url='/os-hypervisors?with_servers=True')
            result = self.controller.index(req)
        index_with_servers = copy.deepcopy(self.INDEX_HYPER_DICTS)
        index_with_servers[0]['servers'] = [
            {'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
        index_with_servers[1]['servers'] = [
            {'name': 'instance-00000002', 'uuid': uuids.hyper2_instance1}]
        self.assertEqual(dict(hypervisors=index_with_servers), result)
        self.assertEqual(2, mock_inst_get_all.call_count)
        mock_inst_get_all.assert_has_calls((
            mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[0].host),
            mock.call(req.environ['nova.context'], TEST_HYPERS_OBJ[1].host)))

    def test_list_with_servers_invalid_parameter(self):
        """A non-boolean with_servers value fails schema validation."""
        req = self._get_request(use_admin_context=True,
                                url='/os-hypervisors?with_servers=invalid')
        self.assertRaises(
            exception.ValidationError, self.controller.index, req)

    def test_list_with_hostname_pattern_and_paging_parameters(self):
        """Hostname-pattern filtering cannot be combined with paging."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?hypervisor_hostname_pattern=foo&'
                'limit=1&marker=%s' % uuids.marker)
        ex = self.assertRaises(exc.HTTPBadRequest, self.controller.index, req)
        self.assertIn('Paging over hypervisors with the '
                      'hypervisor_hostname_pattern query parameter is not '
                      'supported.', six.text_type(ex))

    def test_servers_with_non_integer_hypervisor_id(self):
        """An empty hostname-pattern search returns HTTP 404."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?with_servers=yes&'
                'hypervisor_hostname_pattern=shenzhen')
        with mock.patch.object(self.controller.host_api,
                               'compute_node_search_by_hypervisor',
                               return_value=objects.ComputeNodeList()) as s:
            self.assertRaises(exc.HTTPNotFound, self.controller.index, req)
            s.assert_called_once_with(req.environ['nova.context'], 'shenzhen')

    def test_servers_non_admin(self):
        # Base test disabled: the servers route is gone at >= 2.53
        # (see test_servers above).
        pass

    def test_servers_non_id(self):
        # Base test disabled: the servers route is gone at >= 2.53.
        pass

    def test_search_old_route(self):
        """The standalone search route no longer exists at >= 2.53."""
        self.assertRaises(exception.VersionNotFoundForAPIMethod,
                          self.controller.search,
                          self._get_request(True), 'hyper')

    def test_search(self):
        """hypervisor_hostname_pattern on detail() performs the search."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?hypervisor_hostname_pattern=shenzhen')
        with mock.patch.object(self.controller.host_api,
                               'compute_node_search_by_hypervisor',
                               return_value=objects.ComputeNodeList(
                                   objects=[TEST_HYPERS_OBJ[0]])) as s:
            result = self.controller.detail(req)
            s.assert_called_once_with(req.environ['nova.context'], 'shenzhen')
        expected = {
            'hypervisors': [
                {'cpu_info': {'arch': 'x86_64',
                              'features': [],
                              'model': '',
                              'topology': {'cores': 1,
                                           'sockets': 1,
                                           'threads': 1},
                              'vendor': 'fake'},
                 'current_workload': 2,
                 'disk_available_least': 100,
                 'free_disk_gb': 125,
                 'free_ram_mb': 5120,
                 'host_ip': netaddr.IPAddress('1.1.1.1'),
                 'hypervisor_hostname': 'hyper1',
                 'hypervisor_type': 'xen',
                 'hypervisor_version': 3,
                 'id': TEST_HYPERS_OBJ[0].uuid,
                 'local_gb': 250,
                 'local_gb_used': 125,
                 'memory_mb': 10240,
                 'memory_mb_used': 5120,
                 'running_vms': 2,
                 'service': {'disabled_reason': None,
                             'host': 'compute1',
                             'id': TEST_SERVICES[0].uuid},
                 'state': 'up',
                 'status': 'enabled',
                 'vcpus': 4,
                 'vcpus_used': 2}
            ]
        }
        self.assertNotIn('hypervisors_links', result)
        self.assertDictEqual(expected, result)

    def test_search_invalid_hostname_pattern_parameter(self):
        """An invalid hostname pattern fails schema validation."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?hypervisor_hostname_pattern=invalid~host')
        self.assertRaises(
            exception.ValidationError, self.controller.detail, req)

    def test_search_non_exist(self):
        # Base test disabled: the search route is gone at >= 2.53.
        pass

    def test_search_non_admin(self):
        # Base test disabled: the search route is gone at >= 2.53.
        pass

    def test_search_unmapped(self):
        # Base test disabled: the search route is gone at >= 2.53.
        pass

    def test_show_non_integer_id(self):
        # Base test disabled: ids are UUIDs at >= 2.53
        # (see test_show_integer_id).
        pass

    def test_show_integer_id(self):
        """Integer ids are rejected now that ids must be UUIDs."""
        req = self._get_request(True)
        ex = self.assertRaises(exc.HTTPBadRequest,
                               self.controller.show, req, '1')
        self.assertIn('Invalid uuid 1', six.text_type(ex))

    def test_show_with_servers_invalid_parameter(self):
        """A non-boolean with_servers value fails validation on show()."""
        hyper_id = self._get_hyper_id()
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/%s?with_servers=invalid' % hyper_id)
        ex = self.assertRaises(
            exception.ValidationError, self.controller.show, req, hyper_id)
        self.assertIn('with_servers', six.text_type(ex))

    def test_show_with_servers_host_mapping_not_found(self):
        """An unmapped host makes show(with_servers) return HTTP 404."""
        hyper_id = self._get_hyper_id()
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/%s?with_servers=true' % hyper_id)
        with mock.patch.object(
                self.controller.host_api, 'instance_get_all_by_host',
                side_effect=exception.HostMappingNotFound(name=hyper_id)):
            self.assertRaises(exc.HTTPNotFound, self.controller.show,
                              req, hyper_id)

    def test_show_with_servers(self):
        """with_servers=on attaches the host's server list to show()."""
        instances = objects.InstanceList(objects=[objects.Instance(
            id=1, uuid=uuids.hyper1_instance1)])
        hyper_id = self._get_hyper_id()
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/%s?with_servers=on' % hyper_id)
        with mock.patch.object(self.controller.host_api,
                               'instance_get_all_by_host',
                               return_value=instances) as mock_inst_get_all:
            result = self.controller.show(req, hyper_id)
        show_with_servers = copy.deepcopy(self.DETAIL_HYPERS_DICTS[0])
        show_with_servers['servers'] = [
            {'name': 'instance-00000001', 'uuid': uuids.hyper1_instance1}]
        self.assertDictEqual(dict(hypervisor=show_with_servers), result)
        # instance_get_all_by_host is called exactly once, for this host.
        mock_inst_get_all.assert_called_once_with(
            req.environ['nova.context'], TEST_HYPERS_OBJ[0].host)

    def test_uptime_non_integer_id(self):
        # Base test disabled: ids are UUIDs at >= 2.53
        # (see test_uptime_integer_id).
        pass

    def test_uptime_integer_id(self):
        """Integer ids are rejected by uptime() now that ids are UUIDs."""
        req = self._get_request(True)
        ex = self.assertRaises(exc.HTTPBadRequest,
                               self.controller.uptime, req, '1')
        self.assertIn('Invalid uuid 1', six.text_type(ex))

    def test_detail_pagination(self):
        """detail() pages with UUID markers and emits a ``next`` link."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/detail?limit=1&marker=%s' %
                TEST_HYPERS_OBJ[0].uuid)
        result = self.controller.detail(req)
        link = ('http://localhost/v2/hypervisors/detail?limit=1&marker=%s' %
                TEST_HYPERS_OBJ[1].uuid)
        expected = {
            'hypervisors': [
                {'cpu_info': {'arch': 'x86_64',
                              'features': [],
                              'model': '',
                              'topology': {'cores': 1,
                                           'sockets': 1,
                                           'threads': 1},
                              'vendor': 'fake'},
                 'current_workload': 2,
                 'disk_available_least': 100,
                 'free_disk_gb': 125,
                 'free_ram_mb': 5120,
                 'host_ip': netaddr.IPAddress('2.2.2.2'),
                 'hypervisor_hostname': 'hyper2',
                 'hypervisor_type': 'xen',
                 'hypervisor_version': 3,
                 'id': TEST_HYPERS_OBJ[1].uuid,
                 'local_gb': 250,
                 'local_gb_used': 125,
                 'memory_mb': 10240,
                 'memory_mb_used': 5120,
                 'running_vms': 2,
                 'service': {'disabled_reason': None,
                             'host': 'compute2',
                             'id': TEST_SERVICES[1].uuid},
                 'state': 'up',
                 'status': 'enabled',
                 'vcpus': 4,
                 'vcpus_used': 2}
            ],
            'hypervisors_links': [{'href': link, 'rel': 'next'}]
        }
        self.assertEqual(expected, result)

    def test_detail_pagination_with_invalid_marker(self):
        """An unknown UUID marker yields HTTP 400 on detail()."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/detail?marker=%s' % uuids.invalid_marker)
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.detail, req)

    def test_detail_pagination_with_additional_filter(self):
        """Unknown query parameters are rejected at >= 2.53 (strict)."""
        req = self._get_request(
            True, '/v2/1234/os-hypervisors/detail?limit=1&marker=9&unknown=2')
        self.assertRaises(exception.ValidationError,
                          self.controller.detail, req)

    def test_detail_duplicate_query_parameters_validation(self):
        """Duplicated query parameters are rejected at >= 2.53 (strict)."""
        params = {
            'limit': 1,
            'marker': uuids.marker,
            'hypervisor_hostname_pattern': 'foo',
            'with_servers': 'true'
        }
        for param, value in params.items():
            req = self._get_request(
                use_admin_context=True,
                url='/os-hypervisors/detail?%s=%s&%s=%s' %
                    (param, value, param, value))
            self.assertRaises(exception.ValidationError,
                              self.controller.detail, req)

    def test_index_pagination(self):
        """index() pages with UUID markers and emits a ``next`` link."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?limit=1&marker=%s' %
                TEST_HYPERS_OBJ[0].uuid)
        result = self.controller.index(req)
        link = ('http://localhost/v2/hypervisors?limit=1&marker=%s' %
                TEST_HYPERS_OBJ[1].uuid)
        expected = {
            'hypervisors': [{
                'hypervisor_hostname': 'hyper2',
                'id': TEST_HYPERS_OBJ[1].uuid,
                'state': 'up',
                'status': 'enabled'
            }],
            'hypervisors_links': [{'href': link, 'rel': 'next'}]
        }
        self.assertEqual(expected, result)

    def test_index_pagination_with_invalid_marker(self):
        """An unknown UUID marker yields HTTP 400 on index()."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors?marker=%s' % uuids.invalid_marker)
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.index, req)

    def test_index_pagination_with_additional_filter(self):
        """Unknown query parameters are rejected at >= 2.53 (strict)."""
        req = self._get_request(
            True, '/v2/1234/os-hypervisors/?limit=1&marker=9&unknown=2')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)

    def test_index_duplicate_query_parameters_validation(self):
        """Duplicated query parameters are rejected at >= 2.53 (strict)."""
        params = {
            'limit': 1,
            'marker': uuids.marker,
            'hypervisor_hostname_pattern': 'foo',
            'with_servers': 'true'
        }
        for param, value in params.items():
            req = self._get_request(
                use_admin_context=True,
                url='/os-hypervisors?%s=%s&%s=%s' %
                    (param, value, param, value))
            self.assertRaises(exception.ValidationError,
                              self.controller.index, req)

    def test_show_duplicate_query_parameters_validation(self):
        """A duplicated with_servers parameter is rejected on show()."""
        req = self._get_request(
            use_admin_context=True,
            url='/os-hypervisors/%s?with_servers=1&with_servers=1' %
                uuids.hyper1)
        self.assertRaises(exception.ValidationError,
                          self.controller.show, req, uuids.hyper1)
| true
| true
|
1c47f98f44be7b87e01c9b1a097b1376b38cc5f4
| 1,906
|
py
|
Python
|
sdk/python/setup.py
|
agilecreativity/pulumi-docker
|
0a6928ef65763f30820837d63d3ad9e59ad993e1
|
[
"Apache-2.0"
] | 45
|
2018-09-22T07:48:05.000Z
|
2022-02-15T08:48:26.000Z
|
sdk/python/setup.py
|
agilecreativity/pulumi-docker
|
0a6928ef65763f30820837d63d3ad9e59ad993e1
|
[
"Apache-2.0"
] | 178
|
2018-09-01T23:59:42.000Z
|
2022-03-31T22:05:46.000Z
|
sdk/python/setup.py
|
agilecreativity/pulumi-docker
|
0a6928ef65763f30820837d63d3ad9e59ad993e1
|
[
"Apache-2.0"
] | 21
|
2018-10-11T08:00:17.000Z
|
2022-01-26T02:28:20.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import errno
from setuptools import setup, find_packages
from setuptools.command.install import install
from subprocess import check_call
class InstallPluginCommand(install):
    """setuptools ``install`` command that, after the normal install, also
    installs the matching Pulumi ``docker`` resource-provider plugin via the
    ``pulumi`` CLI.  ``${PLUGIN_VERSION}`` is substituted by the build
    tooling before the package is published."""
    def run(self):
        install.run(self)
        try:
            check_call(['pulumi', 'plugin', 'install', 'resource', 'docker', '${PLUGIN_VERSION}'])
        except OSError as error:
            # ENOENT means the `pulumi` executable itself was not found on
            # PATH; anything else is unexpected and re-raised.
            if error.errno == errno.ENOENT:
                print("""
                There was an error installing the docker resource provider plugin.
                It looks like `pulumi` is not installed on your system.
                Please visit https://pulumi.com/ to install the Pulumi CLI.
                You may try manually installing the plugin by running
                `pulumi plugin install resource docker ${PLUGIN_VERSION}`
                """)
            else:
                raise
def readme():
    """Return the contents of README.md for use as the long description."""
    with open('README.md', encoding='utf-8') as readme_file:
        contents = readme_file.read()
    return contents
# Package metadata.  ``${VERSION}`` (like ``${PLUGIN_VERSION}`` above) is a
# placeholder substituted by the Pulumi build tooling before publishing.
setup(name='pulumi_docker',
      version='${VERSION}',
      description="A Pulumi package for interacting with Docker in Pulumi programs",
      long_description=readme(),
      long_description_content_type='text/markdown',
      # Hook the plugin installation into `pip install` / `setup.py install`.
      cmdclass={
          'install': InstallPluginCommand,
      },
      keywords='pulumi docker',
      url='https://pulumi.io',
      project_urls={
          'Repository': 'https://github.com/pulumi/pulumi-docker'
      },
      license='Apache-2.0',
      packages=find_packages(),
      # Ship the PEP 561 marker so type checkers pick up inline annotations.
      package_data={
          'pulumi_docker': [
              'py.typed',
          ]
      },
      install_requires=[
          'parver>=0.2.1',
          'pulumi>=3.0.0,<4.0.0',
          'semver>=2.8.1'
      ],
      zip_safe=False)
| 31.766667
| 98
| 0.589717
|
import errno
from setuptools import setup, find_packages
from setuptools.command.install import install
from subprocess import check_call
class InstallPluginCommand(install):
def run(self):
install.run(self)
try:
check_call(['pulumi', 'plugin', 'install', 'resource', 'docker', '${PLUGIN_VERSION}'])
except OSError as error:
if error.errno == errno.ENOENT:
print("""
There was an error installing the docker resource provider plugin.
It looks like `pulumi` is not installed on your system.
Please visit https://pulumi.com/ to install the Pulumi CLI.
You may try manually installing the plugin by running
`pulumi plugin install resource docker ${PLUGIN_VERSION}`
""")
else:
raise
def readme():
with open('README.md', encoding='utf-8') as f:
return f.read()
setup(name='pulumi_docker',
version='${VERSION}',
description="A Pulumi package for interacting with Docker in Pulumi programs",
long_description=readme(),
long_description_content_type='text/markdown',
cmdclass={
'install': InstallPluginCommand,
},
keywords='pulumi docker',
url='https://pulumi.io',
project_urls={
'Repository': 'https://github.com/pulumi/pulumi-docker'
},
license='Apache-2.0',
packages=find_packages(),
package_data={
'pulumi_docker': [
'py.typed',
]
},
install_requires=[
'parver>=0.2.1',
'pulumi>=3.0.0,<4.0.0',
'semver>=2.8.1'
],
zip_safe=False)
| true
| true
|
1c47fa0e5e9054dbfe8764c09a2d5ba88af0b2fc
| 5,622
|
py
|
Python
|
docs/conf.py
|
tonyseek/python-orphanage
|
df466c567fda82ef4f6d949b19b5a0b33744513c
|
[
"MIT"
] | 11
|
2018-05-08T08:05:31.000Z
|
2021-03-31T08:57:26.000Z
|
docs/conf.py
|
tonyseek/python-orphanage
|
df466c567fda82ef4f6d949b19b5a0b33744513c
|
[
"MIT"
] | 2
|
2018-05-09T12:01:23.000Z
|
2019-10-21T17:24:40.000Z
|
docs/conf.py
|
tonyseek/python-orphanage
|
df466c567fda82ef4f6d949b19b5a0b33744513c
|
[
"MIT"
] | 2
|
2018-06-19T06:28:02.000Z
|
2021-03-02T01:59:04.000Z
|
# -*- coding: utf-8 -*-
#
# Sphinx configuration for the `orphanage` documentation.
#
# Only the most common options are set here; the full reference lives at
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------
# No autodoc targets live outside the package, so sys.path needs no tweaks.

# -- Project information -----------------------------------------------------

project = u'orphanage'
copyright = u'2018, Jiangge Zhang'
author = u'Jiangge Zhang'

# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u'0.1.0'

# -- General configuration ---------------------------------------------------

# Sphinx extension module names (builtin or custom).
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
]

# Paths, relative to this directory, that contain templates.
templates_path = ['_templates']

# Filename suffix(es) of source documents.
source_suffix = '.rst'

# The document containing the root toctree.
master_doc = 'index'

# Language for Sphinx-generated content.  Explicit 'en' rather than None:
# Sphinx 5+ warns on None and coerces it to 'en' anyway.
language = 'en'

# Patterns, relative to the source directory, to skip when looking for
# source files.  Also affects html_static_path and html_extra_path.
exclude_patterns = [u'_build', u'Thumbs.db', u'.DS_Store', u'.gitignore',
                    u'Makefile']

# The Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output -------------------------------------------------

html_theme = 'alabaster'

# Theme-specific look-and-feel options (alabaster).
html_theme_options = {
    'logo_name': True,
    'description': 'Let orphan processes suicide',
    'github_user': 'tonyseek',
    'github_repo': 'python-orphanage',
    'github_type': 'star',
    'github_count': True,
    'github_banner': 'github-ribbons.png',
    'github_button': True,
    'travis_button': True,
}

# Custom static files; copied after the builtin static files, so a local
# "default.css" would override the theme's.
html_static_path = ['_static']

# Sidebar templates, mapped per document pattern.
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html',
        'searchbox.html',
        'donate.html',
    ],
}

# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for the HTML help builder.
htmlhelp_basename = 'orphanagedoc'

# -- Options for LaTeX output ------------------------------------------------

# All LaTeX knobs ('papersize', 'pointsize', 'preamble', 'figure_align', ...)
# are left at their defaults.
latex_elements = {
}

# Grouping of the document tree into LaTeX files:
# (source start file, target name, title, author, documentclass).
latex_documents = [
    (master_doc, 'orphanage.tex', u'orphanage Documentation',
     u'Jiangge Zhang', 'manual'),
]

# -- Options for manual page output ------------------------------------------

# One entry per manual page:
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'orphanage', u'orphanage Documentation',
     [author], 1)
]

# -- Options for Texinfo output ----------------------------------------------

# Grouping of the document tree into Texinfo files:
# (source start file, target name, title, author, dir menu entry,
#  description, category).
texinfo_documents = [
    (master_doc, 'orphanage', u'orphanage Documentation',
     author, 'orphanage', 'One line description of project.',
     'Miscellaneous'),
]

# -- Options for intersphinx extension ---------------------------------------

# Cross-reference the Python standard library docs.  Uses the named-mapping
# form; the bare-URL key form is deprecated and removed in Sphinx 8.
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
| 30.225806
| 79
| 0.639808
|
project = u'orphanage'
copyright = u'2018, Jiangge Zhang'
author = u'Jiangge Zhang'
version = u''
release = u'0.1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = [u'_build', u'Thumbs.db', u'.DS_Store', u'.gitignore',
u'Makefile']
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_theme_options = {
'logo_name': True,
'description': 'Let orphan processes suicide',
'github_user': 'tonyseek',
'github_repo': 'python-orphanage',
'github_type': 'star',
'github_count': True,
'github_banner': 'github-ribbons.png',
'github_button': True,
'travis_button': True,
}
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
],
}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'orphanagedoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'orphanage.tex', u'orphanage Documentation',
u'Jiangge Zhang', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'orphanage', u'orphanage Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'orphanage', u'orphanage Documentation',
author, 'orphanage', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| true
| true
|
1c47fa3816f95e926ae95ef3681d5bf62a93e19a
| 1,212
|
py
|
Python
|
LeetCode/python-R1/0049-字母异位词分组D/V2-OverTIme.py
|
huuuuusy/Programming-Practice-Everyday
|
c78b368ab0439d85b8a69f6d9c8154d708bafc9c
|
[
"Apache-2.0"
] | 4
|
2019-08-27T11:28:03.000Z
|
2020-12-24T07:10:22.000Z
|
LeetCode/python-R1/0049-字母异位词分组D/V2-OverTIme.py
|
huuuuusy/Programming-Practice-Everyday
|
c78b368ab0439d85b8a69f6d9c8154d708bafc9c
|
[
"Apache-2.0"
] | null | null | null |
LeetCode/python-R1/0049-字母异位词分组D/V2-OverTIme.py
|
huuuuusy/Programming-Practice-Everyday
|
c78b368ab0439d85b8a69f6d9c8154d708bafc9c
|
[
"Apache-2.0"
] | 4
|
2019-09-20T09:44:01.000Z
|
2020-12-24T07:10:23.000Z
|
"""
@Author: huuuuusy
@GitHub: https://github.com/huuuuusy
系统: Ubuntu 18.04
IDE: VS Code 1.36
工具: python == 3.7.3
"""
"""
思路:
在V1的基础上加了字典对输入进行长度分类,在每一类里循环判断
速度比V1快一些,但仍然超时
结果:
第100个测试用例超时
"""
from collections import Counter
class Solution:
    def groupAnagrams(self, strs):
        """Group words that are anagrams of each other (LeetCode 49).

        The original version compared every word against the first member
        of every existing group with ``Counter`` equality — O(n^2) counter
        comparisons, which (as the file header notes) exceeds the time
        limit.  Instead, bucket each word under its sorted-character key:
        anagrams share the same key, so grouping is a single
        O(n * k log k) pass (k = max word length).

        :param strs: list of strings to group
        :return: list of lists; each inner list holds mutually anagrammatic
                 words, in their input order (group order is unspecified by
                 the problem)
        """
        groups = {}
        for word in strs:
            # Sorted characters form a canonical key shared by all anagrams.
            key = ''.join(sorted(word))
            groups.setdefault(key, []).append(word)
        return list(groups.values())
if __name__ == "__main__":
strs = ["eat", "tea", "tan", "ate", "nat", "bat",'apple']
answer = Solution().groupAnagrams(strs)
print(answer)
| 24.734694
| 68
| 0.459571
|
from collections import Counter
class Solution:
def groupAnagrams(self, strs):
res = []
if len(strs) == 0:
return res
if len(strs) == 1:
return [strs]
d = {}
for s in strs:
d[len(s)] = d.get(len(s),[]) + [s]
for key, value in d.items():
res_part = [[value[0]]]
if len(value) > 1:
for item in value[1:]:
for i in range(len(res_part)):
add = False
if Counter(res_part[i][0]) == Counter(item):
res_part[i].append(item)
add = True
break
if add == False:
res_part.append([item])
res.extend(res_part)
return res
if __name__ == "__main__":
strs = ["eat", "tea", "tan", "ate", "nat", "bat",'apple']
answer = Solution().groupAnagrams(strs)
print(answer)
| true
| true
|
1c47fa676f9225cd4fb2d3c9c868a8eaf56f7c8f
| 825
|
py
|
Python
|
src/native_bayes/classify.py
|
zegra1989/ml
|
ed574ff45d4852d0c93f1ad5d7e0160cd752c9e0
|
[
"MIT"
] | null | null | null |
src/native_bayes/classify.py
|
zegra1989/ml
|
ed574ff45d4852d0c93f1ad5d7e0160cd752c9e0
|
[
"MIT"
] | null | null | null |
src/native_bayes/classify.py
|
zegra1989/ml
|
ed574ff45d4852d0c93f1ad5d7e0160cd752c9e0
|
[
"MIT"
] | null | null | null |
def NBAccuracy(features_train, labels_train, features_test, labels_test):
""" compute the accuracy of your Naive Bayes classifier """
### import the sklearn module for GaussianNB
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import accuracy_score
### create classifier
clf = GaussianNB()
### fit the classifier on the training features and labels
clf.fit(features_train, labels_train)
### use the trained classifier to predict labels for the test features
pred = clf.predict(features_test)
### calculate and return the accuracy on the test data
### this is slightly different than the example,
### where we just print the accuracy
### you might need to import an sklearn module
accuracy = accuracy_score(pred, labels_test)
return accuracy
| 39.285714
| 74
| 0.727273
|
def NBAccuracy(features_train, labels_train, features_test, labels_test):
acy_score
| true
| true
|
1c47fbfef732fe271a24cdb151acef2779e5603d
| 644
|
py
|
Python
|
tools/telemetry/telemetry/unittest_util/options_for_unittests.py
|
kjthegod/chromium
|
cf940f7f418436b77e15b1ea23e6fa100ca1c91a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
telemetry/telemetry/testing/options_for_unittests.py
|
kind-john/catapult
|
29635376119833f172a58a48a3282d353ce55d2b
|
[
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
telemetry/telemetry/testing/options_for_unittests.py
|
kind-john/catapult
|
29635376119833f172a58a48a3282d353ce55d2b
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module provides the global variable options_for_unittests.
This is set to a BrowserOptions object by the test harness, or None
if unit tests are not running.
This allows multiple unit tests to use a specific
browser, in face of multiple options."""
_options = []
def Push(options):
_options.append(options)
def Pop():
return _options.pop()
def GetCopy():
if not AreSet():
return None
return _options[-1].Copy()
def AreSet():
return bool(_options)
| 19.515152
| 72
| 0.734472
|
_options = []
def Push(options):
_options.append(options)
def Pop():
return _options.pop()
def GetCopy():
if not AreSet():
return None
return _options[-1].Copy()
def AreSet():
return bool(_options)
| true
| true
|
1c47fc0ab6403bb40623a1044733f8334c1d1740
| 33,931
|
py
|
Python
|
theano/tensor/var.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
theano/tensor/var.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
theano/tensor/var.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2019-09-09T18:31:41.000Z
|
2019-09-09T18:31:41.000Z
|
from __future__ import absolute_import, print_function, division
import collections
import copy
import traceback as tb
import warnings
import numpy
from six import integer_types
from six.moves import xrange
import theano
from theano.compat import PY3
from theano.scalar import ComplexError, IntegerDivisionError
from theano.gof import Constant, Variable
from theano.gof.utils import hashtype
from theano.tensor.utils import hash_from_ndarray
from theano.tensor.type import TensorType
from theano.configparser import config
def equal_slices(s1, s2):
return (s1.start == s2.start and
s1.stop == s2.stop and
s1.step == s2.step)
class AsTensorError(TypeError):
"""
Raised when as_tensor_variable isn't able to create a TensorVariable.
"""
pass
class _tensor_py_operators(object):
# UNARY
def __abs__(self):
return theano.tensor.basic.abs_(self)
def __neg__(self):
return theano.tensor.basic.neg(self)
# CASTS
# REMOVED THESE BECAUSE PYTHON appears to require __int__ to return
# an int. -JB 20081112
# def __int__(self): return convert_to_int32(self)
# def __float__(self): return convert_to_float64(self)
# def __complex__(self): return convert_to_complex128(self)
# COMPARISONS
_is_nonzero = True
def __lt__(self, other):
rval = theano.tensor.basic.lt(self, other)
rval._is_nonzero = False
return rval
def __le__(self, other):
rval = theano.tensor.basic.le(self, other)
rval._is_nonzero = False
return rval
def __gt__(self, other):
rval = theano.tensor.basic.gt(self, other)
rval._is_nonzero = False
return rval
def __ge__(self, other):
rval = theano.tensor.basic.ge(self, other)
rval._is_nonzero = False
return rval
def __nonzero__(self):
# Python 2.x
return self.__bool__()
def __bool__(self):
# This is meant to prohibit stuff like a < b < c, which is internally
# implemented as (a < b) and (b < c). The trouble with this is the
# side-effect that checking for a non-NULL a by typing "if a: ..."
# uses the same __nonzero__ method. We want these both to work, but
# it seems impossible. Currently, all vars evaluate to nonzero except
# the return values of comparison operators, which raise this
# exception. If you can think of a better solution, go for it!
#
# __bool__ is Python 3.x data model. __nonzero__ is Python 2.x.
if self._is_nonzero:
return True
else:
raise TypeError(
"Variables do not support boolean operations."
)
# BITWISE
def __invert__(self):
return theano.tensor.basic.invert(self)
def __and__(self, other):
return theano.tensor.basic.and_(self, other)
def __or__(self, other):
return theano.tensor.basic.or_(self, other)
def __xor__(self, other):
return theano.tensor.basic.xor(self, other)
def __rand__(self, other):
return theano.tensor.basic.and_(other, self)
def __ror__(self, other):
return theano.tensor.basic.or_(other, self)
def __rxor__(self, other):
return theano.tensor.basic.xor(other, self)
# def __iand__(self, other):
# return _and_inplace(self, other)
#
# def __ior__(self, other):
# return _or_inplace(self, other)
#
# def __ixor__(self, other):
# return _xor_inplace(self, other)
# ARITHMETIC - NORMAL
def __add__(self, other):
try:
return theano.tensor.basic.add(self, other)
# We should catch the minimum number of exception here.
# Otherwise this will convert error when Theano flags
# compute_test_value is used
# Evidently, we need to catch NotImplementedError
# TypeError from as_tensor_variable are caught in Elemwise.make_node
# Oterwise TensorVariable * SparseVariable won't work!
except (NotImplementedError, AsTensorError):
# We must return NotImplemented and not an
# NotImplementedError or raise an NotImplementedError.
# That way python will give a good error message like this
# `TypeError: unsupported operand type(s) for +:
# 'TensorVariable' and 'TensorVariable'`
return NotImplemented
def __sub__(self, other):
# See explanation in __add__ for the error catched
# and the return value in that case
try:
return theano.tensor.basic.sub(self, other)
except (NotImplementedError, AsTensorError):
return NotImplemented
def __mul__(self, other):
# See explanation in __add__ for the error catched
# and the return value in that case
try:
return theano.tensor.mul(self, other)
except (NotImplementedError, AsTensorError):
return NotImplemented
def __div__(self, other):
# See explanation in __add__ for the error catched
# and the return value in that case
try:
return theano.tensor.basic.div_proxy(self, other)
except IntegerDivisionError:
# This is to raise the exception that occurs when trying to divide
# two integer arrays (currently forbidden).
raise
except (NotImplementedError, AsTensorError):
return NotImplemented
if PY3:
__truediv__ = __div__
def __pow__(self, other):
# See explanation in __add__ for the error catched
# adn the return value in that case
try:
return theano.tensor.basic.pow(self, other)
except (NotImplementedError, AsTensorError):
return NotImplemented
def __mod__(self, other):
# See explanation in __add__ for the error catched
# adn the return value in that case
try:
return theano.tensor.basic.mod_check(self, other)
except ComplexError:
# This is to raise the exception that occurs when trying to compute
# x % y with either x or y a complex number.
raise
except (NotImplementedError, AsTensorError):
return NotImplemented
def __divmod__(self, other):
return theano.tensor.basic.divmod(self, other)
def __truediv__(self, other):
return theano.tensor.basic.true_div(self, other)
def __floordiv__(self, other):
return theano.tensor.basic.floor_div(self, other)
def __rtruediv__(self, other):
return theano.tensor.basic.true_div(other, self)
def __rfloordiv__(self, other):
return theano.tensor.basic.floor_div(other, self)
# DO NOT USE THESE BECAUSE INPLACE OPS SHOULD BE INSERTED
# BY OPTIMIZATIONS ONLY
# ARITHMETIC - INPLACE
# def __iadd__(self, other):
# return _add_inplace(self, other)
# def __isub__(self, other):
# return _sub_inplace(self, other)
#
# def __imul__(self, other):
# return _mul_inplace(self, other)
#
# def __idiv__(self, other):
# return _div_inplace(self, other)
#
# def __ipow__(self, other):
# return _pow_inplace(self, other)
# ARITHMETIC - RIGHT-OPERAND
def __radd__(self, other):
return theano.tensor.basic.add(other, self)
def __rsub__(self, other):
return theano.tensor.basic.sub(other, self)
def __rmul__(self, other):
return theano.tensor.basic.mul(other, self)
def __rdiv__(self, other):
return theano.tensor.basic.div_proxy(other, self)
def __rmod__(self, other):
return theano.tensor.basic.mod(other, self)
def __rdivmod__(self, other):
return theano.tensor.basic.divmod(other, self)
def __rpow__(self, other):
return theano.tensor.basic.pow(other, self)
# TRANSPOSE
T = property(lambda self: theano.tensor.basic.transpose(self))
def transpose(self, *axes):
"""
Returns
-------
object
`tensor.transpose(self, axes)` or `tensor.transpose(self, axes[0])`.
If only one `axes` argument is provided and it is iterable, then it is
assumed to be the entire axes tuple, and passed intact to
tensor.transpose.
"""
if len(axes) == 0:
return theano.tensor.basic.transpose(self)
try:
iter(axes[0])
iterable = True
except TypeError:
iterable = False
if len(axes) == 1 and iterable:
return theano.tensor.basic.transpose(self, axes[0])
else:
return theano.tensor.basic.transpose(self, axes)
shape = property(lambda self: theano.tensor.basic.shape(self))
size = property(lambda self: self.shape[0] if self.ndim == 1 else
theano.tensor.basic.prod(self.shape))
# We can't implement __len__ to provide a better error message.
def any(self, axis=None, keepdims=False):
return theano.tensor.basic.any(self, axis=axis, keepdims=keepdims)
def all(self, axis=None, keepdims=False):
return theano.tensor.basic.all(self, axis=axis, keepdims=keepdims)
# Otherwise TensorVariable[:-1] does not work as Python 2.5.1 calls
# __len__ before calling __getitem__. It also does not catch the raised
# Exception!
# def __len__(self):
# # We can't implement __len__ as Python requests that this
# # function returns an integer >=0
# raise Exception("Theano Variables can't work with len(Theano "
# "Variable) due to Python restriction. You can use "
# "TheanoVariable.shape[0] instead.")
def reshape(self, shape, ndim=None):
"""Return a reshaped view/copy of this variable.
Parameters
----------
shape
Something that can be converted to a symbolic vector of integers.
ndim
The length of the shape. Passing None here means for
Theano to try and guess the length of `shape`.
.. warning:: This has a different signature than numpy's
ndarray.reshape!
In numpy you do not need to wrap the shape arguments
in a tuple, in theano you do need to.
"""
if ndim is not None:
if not isinstance(ndim, integer_types):
raise ValueError("Expected ndim to be an integer, is " +
str(type(ndim)))
return theano.tensor.basic.reshape(self, shape, ndim=ndim)
def dimshuffle(self, *pattern):
"""
Reorder the dimensions of this variable, optionally inserting
broadcasted dimensions.
Parameters
----------
pattern
List/tuple of int mixed with 'x' for broadcastable dimensions.
Examples
--------
For example, to create a 3D view of a [2D] matrix, call
``dimshuffle([0,'x',1])``. This will create a 3D view such that the
middle dimension is an implicit broadcasted dimension. To do the same
thing on the transpose of that matrix, call ``dimshuffle([1, 'x', 0])``.
Notes
-----
This function supports the pattern passed as a tuple, or as a
variable-length argument (e.g. ``a.dimshuffle(pattern)`` is equivalent
to ``a.dimshuffle(*pattern)`` where ``pattern`` is a list/tuple of ints
mixed with 'x' characters).
See Also
--------
DimShuffle
"""
if (len(pattern) == 1) and (isinstance(pattern[0], (list, tuple))):
pattern = pattern[0]
op = theano.tensor.basic.DimShuffle(list(self.type.broadcastable),
pattern)
return op(self)
def flatten(self, ndim=1):
return theano.tensor.basic.flatten(self, ndim)
def ravel(self):
return theano.tensor.basic.flatten(self)
def diagonal(self, offset=0, axis1=0, axis2=1):
return theano.tensor.basic.diagonal(self, offset, axis1, axis2)
# Transfer the data to another device
def transfer(self, target):
"""
If `target` is `'cpu'` this will transfer to a TensorType (if
not already one). Other types may define additional targets.
Parameters
----------
target : str
The desired location of the output variable
"""
return theano.tensor.transfer(self, target)
# Elemwise
def arccos(self):
return theano.tensor.arccos(self)
def arccosh(self):
return theano.tensor.arccosh(self)
def arcsin(self):
return theano.tensor.arcsin(self)
def arcsinh(self):
return theano.tensor.arcsinh(self)
def arctan(self):
return theano.tensor.arctan(self)
def arctanh(self):
return theano.tensor.arctanh(self)
def ceil(self):
return theano.tensor.ceil(self)
def cos(self):
return theano.tensor.cos(self)
def cosh(self):
return theano.tensor.cosh(self)
def deg2rad(self):
return theano.tensor.deg2rad(self)
def exp(self):
return theano.tensor.exp(self)
def exp2(self):
return theano.tensor.exp2(self)
def expm1(self):
return theano.tensor.expm1(self)
def floor(self):
return theano.tensor.floor(self)
def log(self):
return theano.tensor.log(self)
def log10(self):
return theano.tensor.log10(self)
def log1p(self):
return theano.tensor.log1p(self)
def log2(self):
return theano.tensor.log2(self)
def rad2deg(self):
return theano.tensor.rad2deg(self)
def sin(self):
return theano.tensor.sin(self)
def sinh(self):
return theano.tensor.sinh(self)
def sqrt(self):
return theano.tensor.sqrt(self)
def tan(self):
return theano.tensor.tan(self)
def tanh(self):
return theano.tensor.tanh(self)
def trunc(self):
return theano.tensor.trunc(self)
# CASTING
def astype(self, dtype):
return theano.tensor.cast(self, dtype)
# SLICING/INDEXING
def __getitem__(self, args):
def check_bool(args_el):
try:
if (isinstance(args_el, (numpy.bool_, bool)) or
args_el.dtype == 'bool'):
raise TypeError('TensorType does not support boolean '
'mask for indexing such as tensor[x==0]. '
'Instead you can use non_zeros() such as '
'tensor[(x == 0).nonzeros()]. ')
except AttributeError:
pass
if (not isinstance(args_el, theano.tensor.Variable) and
isinstance(args_el, collections.Iterable)):
for el in args_el:
check_bool(el)
check_bool(args)
if (isinstance(args, list) and
any([isinstance(a, slice) for a in args])):
pass
elif not isinstance(args, tuple):
args = args,
# Convert an Ellipsis if provided into an appropriate number of
# slice(None).
ellipses = [i
for i, index in enumerate(args)
if index is Ellipsis]
if len(ellipses) > 1:
raise IndexError(
"an index can only have a single Ellipsis (`...`)")
elif len(ellipses) == 1:
new_axes = sum(1
for index in args
if index is numpy.newaxis) # numpy.newaxis is None
ellipsis_at = ellipses[0]
args = list(args)
args[ellipsis_at: ellipsis_at + 1] = (
[slice(None)] * (self.ndim - (len(args) - 1 - new_axes)))
# Force input to be int64 datatype if input is an empty list or tuple
# Else leave it as is if it is a real number
args = tuple([numpy.array(inp, dtype=numpy.int64)
if(inp == [] or inp == ()) else inp for inp in args])
# Convert python literals to theano constants
args = theano.tensor.subtensor.make_constant(args)
# Determine if advanced indexing is needed or not
# The logic is already in Subtensor.convert: if it succeeds,
# standard indexing is used; if it fails with
# AdvancedIndexingError, advanced indexing
advanced = False
axis = None
for i, arg in enumerate(args):
try:
if arg is not numpy.newaxis:
theano.tensor.subtensor.Subtensor.convert(arg)
except theano.tensor.subtensor.AdvancedIndexingError:
if advanced:
axis = None
break
else:
advanced = True
axis = i
if advanced:
if (axis is not None and
all(isinstance(a, slice) and
equal_slices(a, slice(None)) for a in args[:axis]) and
all(isinstance(a, slice) and
equal_slices(a, slice(None)) for a in args[axis + 1:]) and
isinstance(args[axis],
(numpy.ndarray, list,
TensorVariable, TensorConstant,
theano.tensor.sharedvar.TensorSharedVariable))):
return self.take(args[axis], axis)
else:
return theano.tensor.subtensor.advanced_subtensor(self, *args)
else:
if numpy.newaxis in args:
# None (aka np.newaxis) in numpy indexing means to add a
# broadcastable dimension, which theano traditionally did with
# the dimshuffle op. The following code converts numpy-style
# indexing on self to traditional [read: implemented] theano
# indexing on a dimshuffled view of self.
counter = 0
pattern = []
new_args = []
for arg in args:
if arg == numpy.newaxis:
pattern.append('x')
new_args.append(slice(None, None, None))
else:
pattern.append(counter)
counter += 1
new_args.append(arg)
view = self.dimshuffle(pattern)
full_slices = True
for arg in new_args:
# We can't do arg == slice(None, None, None) as in
# Python 2.7, this call __lt__ if we have a slice
# with some symbolic variable.
if not (isinstance(arg, slice) and
arg.start is None and
arg.stop is None and
arg.step is None):
full_slices = False
if full_slices:
return view
else:
return view.__getitem__(tuple(new_args))
else:
return theano.tensor.subtensor.Subtensor(args)(
self, *theano.tensor.subtensor.Subtensor.collapse(
args,
lambda entry: isinstance(entry, Variable)))
def take(self, indices, axis=None, mode='raise'):
return theano.tensor.subtensor.take(self, indices, axis, mode)
# COPYING
def copy(self, name=None):
"""Return a symbolic copy and optionally assign a name.
Does not copy the tags.
"""
copied_variable = theano.tensor.basic.tensor_copy(self)
copied_variable.name = name
return copied_variable
def __iter__(self):
try:
for i in xrange(theano.tensor.basic.get_vector_length(self)):
yield self[i]
except TypeError:
# This prevents accidental iteration via builtin.sum(self)
raise TypeError(('TensorType does not support iteration. '
'Maybe you are using builtin.sum instead of '
'theano.tensor.sum? (Maybe .max?)'))
# CONVENIENT ACCESS TO TYPE PROPERTIES
ndim = property(lambda self: self.type.ndim)
"""The rank of this tensor."""
broadcastable = property(lambda self: self.type.broadcastable)
"""
The broadcastable signature of this tensor.
See Also
--------
broadcasting
"""
dtype = property(lambda self: self.type.dtype)
"""The dtype of this tensor."""
# extra pseudo-operator symbols
def __dot__(left, right):
return theano.tensor.basic.dot(left, right)
def __rdot__(right, left):
return theano.tensor.basic.dot(left, right)
dot = __dot__
def sum(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
"""See `theano.tensor.sum`."""
return theano.tensor.basic.sum(self, axis=axis,
dtype=dtype, keepdims=keepdims,
acc_dtype=acc_dtype)
def prod(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
"""See `theano.tensor.prod`."""
return theano.tensor.basic.prod(self, axis=axis,
dtype=dtype, keepdims=keepdims,
acc_dtype=acc_dtype)
def norm(self, L, axis=None, keepdims=False):
if L == 0:
raise NotImplementedError()
if numpy.isinf(L):
raise NotImplementedError()
# optimizations will/should catch cases like L=1, L=2
y = theano.tensor.basic.pow(
theano.tensor.basic.pow(
theano.tensor.basic.abs_(self), L).sum(axis=axis), 1.0 / L)
if keepdims:
return theano.tensor.basic.makeKeepDims(self, y, axis)
else:
return y
def mean(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
"""See `theano.tensor.mean`."""
return theano.tensor.basic.mean(self, axis=axis,
dtype=dtype, keepdims=keepdims,
acc_dtype=acc_dtype)
def var(self, axis=None, ddof=0, keepdims=False, corrected=False):
"""See `theano.tensor.var`."""
return theano.tensor.basic.var(self, axis=axis, ddof=ddof,
keepdims=keepdims, corrected=corrected)
def std(self, axis=None, ddof=0, keepdims=False, corrected=False):
"""See `theano.tensor.std`."""
return theano.tensor.basic.std(self, axis=axis, ddof=ddof,
keepdims=keepdims, corrected=corrected)
def min(self, axis=None, keepdims=False):
"""See `theano.tensor.min`."""
return theano.tensor.basic.min(self, axis, keepdims=keepdims)
def max(self, axis=None, keepdims=False):
"""See `theano.tensor.max`."""
return theano.tensor.basic.max(self, axis, keepdims=keepdims)
def argmin(self, axis=None, keepdims=False):
"""See `theano.tensor.argmin`."""
return theano.tensor.basic.argmin(self, axis, keepdims=keepdims)
def argmax(self, axis=None, keepdims=False):
"""See `theano.tensor.argmax`."""
return theano.tensor.basic.argmax(self, axis, keepdims=keepdims)
def nonzero(self, return_matrix=False):
"""See `theano.tensor.nonzero`."""
return theano.tensor.basic.nonzero(self, return_matrix=return_matrix)
def nonzero_values(self):
"""See `theano.tensor.nonzero_values`."""
return theano.tensor.basic.nonzero_values(self)
def sort(self, axis=-1, kind='quicksort', order=None):
"""See `theano.tensor.sort`."""
return theano.tensor.sort(self, axis, kind, order)
def argsort(self, axis=-1, kind='quicksort', order=None):
"""See `theano.tensor.argsort`."""
return theano.tensor.argsort(self, axis, kind, order)
def clip(self, a_min, a_max):
"Clip (limit) the values in an array."
return theano.tensor.basic.clip(self, a_min, a_max)
def conj(self):
"""See `theano.tensor.conj`."""
return theano.tensor.basic.conj(self)
conjugate = conj
def repeat(self, repeats, axis=None):
"""See `theano.tensor.repeat`."""
return theano.tensor.extra_ops.repeat(self, repeats, axis)
def round(self, mode=None):
"""See `theano.tensor.round`."""
return theano.tensor.basic.round(self, mode)
def trace(self):
return theano.tensor.nlinalg.trace(self)
# TO TRUMP NUMPY OPERATORS
__array_priority__ = 1000
def get_scalar_constant_value(self):
return theano.tensor.basic.get_scalar_constant_value(self)
def zeros_like(model, dtype=None):
return theano.tensor.basic.zeros_like(model, dtype=dtype)
def ones_like(model, dtype=None):
return theano.tensor.basic.ones_like(model, dtype=dtype)
def cumsum(self, axis=None):
return theano.tensor.extra_ops.cumsum(self, axis)
def cumprod(self, axis=None):
return theano.tensor.extra_ops.cumprod(self, axis)
def searchsorted(self, v, side='left', sorter=None):
return theano.tensor.extra_ops.searchsorted(self, v, side, sorter)
def ptp(self, axis=None):
"""See 'theano.tensor.ptp'."""
return theano.tensor.ptp(self, axis)
def swapaxes(self, axis1, axis2):
"""
Return 'tensor.swapaxes(self, axis1, axis2).
If a matrix is provided with the right axes, its transpose
will be returned.
"""
return theano.tensor.basic.swapaxes(self, axis1, axis2)
def fill(self, value):
"""Fill inputted tensor with the assigned value."""
return theano.tensor.basic.fill(self, value)
def choose(self, a, choices, out=None, mode='raise'):
"""
Construct an array from an index array and a set of arrays to choose
from.
"""
return theano.tensor.basic.choose(self, a, choices, out=None,
mode='raise')
def squeeze(self):
"""
Remove broadcastable dimensions from the shape of an array.
It returns the input array, but with the broadcastable dimensions
removed. This is always `x` itself or a view into `x`.
"""
return theano.tensor.extra_ops.squeeze(self)
def compress(self, a, axis=None):
"""Return selected slices only."""
return theano.tensor.extra_ops.compress(self, a, axis=axis)
class TensorVariable(_tensor_py_operators, Variable):
"""
Subclass to add the tensor operators to the basic `Variable` class.
"""
def __init__(self, type, owner=None, index=None, name=None):
super(TensorVariable, self).__init__(type, owner=owner,
index=index, name=name)
if (config.warn_float64 != 'ignore' and type.dtype == 'float64'):
msg = ('You are creating a TensorVariable '
'with float64 dtype. You requested an action via '
'the Theano flag warn_float64={ignore,warn,raise,pdb}.')
if config.warn_float64 == "warn":
# Get the user stack. We don't want function inside the
# tensor and gof directory to be shown to the user.
x = tb.extract_stack()
nb_rm = 0
while x:
file_path = x[-1][0]
rm = False
for p in ["theano/tensor/", "theano\\tensor\\",
"theano/gof/", "theano\\tensor\\"]:
if p in file_path:
x = x[:-1]
nb_rm += 1
rm = True
break
if not rm:
break
warnings.warn(msg, stacklevel=1 + nb_rm)
elif config.warn_float64 == "raise":
raise Exception(msg)
elif config.warn_float64 == 'pdb':
import pdb
pdb.set_trace()
TensorType.Variable = TensorVariable
class TensorConstantSignature(tuple):
"""
A Signature object for comparing TensorConstant instances.
An instance is a pair: (Type instance, ndarray).
"""
def __eq__(self, other):
if type(self) != type(other):
return False
try:
(t0, d0), (t1, d1) = self, other
except Exception:
return False
# N.B. compare shape to ensure no broadcasting in ==
if t0 != t1 or d0.shape != d1.shape:
return False
self.no_nan # Ensure has_nan is computed.
# Note that in the comparisons below, the elementwise comparisons
# come last because they are the most expensive checks.
if self.has_nan:
other.no_nan # Ensure has_nan is computed.
return (other.has_nan and
self.sum == other.sum and
(self.no_nan.mask == other.no_nan.mask).all() and
# Note that the second test below (==) may crash e.g. for
# a single scalar NaN value, so we do not run it when all
# values are missing.
(self.no_nan.mask.all() or
(self.no_nan == other.no_nan).all()))
else:
# Simple case where we do not need to worry about NaN values.
# (note that if there are NaN values in d1, this will return
# False, which is why we do not bother with testing `other.has_nan`
# here).
return (self.sum == other.sum) and numpy.all(d0 == d1)
def __hash__(self):
t, d = self
return hashtype(self) ^ hash(t) ^ hash(d.shape) ^ hash(self.sum)
def theano_hash(self):
_, d = self
return hash_from_ndarray(d)
def _get_sum(self):
"""Compute sum of non NaN / Inf values in the array."""
try:
return self._sum
except AttributeError:
self._sum = self.no_nan.sum()
# The following 2 lines are needede as in Python 3.3 with NumPy
# 1.7.1, numpy.ndarray and numpy.memmap aren't hashable.
if type(self._sum) is numpy.memmap:
self._sum = numpy.asarray(self._sum).item()
if self.has_nan and self.no_nan.mask.all():
# In this case the sum is not properly computed by numpy.
self._sum = 0
if numpy.isinf(self._sum) or numpy.isnan(self._sum):
# NaN may happen when there are both -inf and +inf values.
if self.has_nan:
# Filter both NaN and Inf values.
mask = self.no_nan.mask + numpy.isinf(self[1])
else:
# Filter only Inf values.
mask = numpy.isinf(self[1])
if mask.all():
self._sum = 0
else:
self._sum = numpy.ma.masked_array(self[1], mask).sum()
# At this point there should be no more NaN.
assert not numpy.isnan(self._sum)
return self._sum
sum = property(_get_sum)
def _get_no_nan(self):
try:
return self._no_nan
except AttributeError:
nan_mask = numpy.isnan(self[1])
if nan_mask.any():
self._no_nan = numpy.ma.masked_array(self[1], nan_mask)
self.has_nan = True
else:
self._no_nan = self[1]
self.has_nan = False
return self._no_nan
no_nan = property(_get_no_nan)
class TensorConstant(_tensor_py_operators, Constant):
"""Subclass to add the tensor operators to the basic `Constant` class.
To create a TensorConstant, use the `constant` function in this module.
"""
def __init__(self, type, data, name=None):
Constant.__init__(self, type, data, name)
self.tag.unique_value = None
if isinstance(data, numpy.ndarray) and data.ndim > 0:
flat_data = data.ravel()
if flat_data.shape[0]:
if (flat_data == flat_data[0]).all():
self.tag.unique_value = flat_data[0]
def __str__(self):
if self.tag.unique_value is not None:
name = "%s of %s" % (str(self.data.shape),
str(self.tag.unique_value))
else:
name = "%s" % self.data
if len(name) > 20:
name = name[:10] + ".." + name[-10:]
return "TensorConstant{%s}" % name
def signature(self):
return TensorConstantSignature((self.type, self.data))
def equals(self, other):
# Override Contant.equals to allow to compare with
# numpy.ndarray, and python type.
if isinstance(other, (numpy.ndarray, int, float)):
# Make a TensorConstant to be able to compare
other = theano.tensor.basic.constant(other)
return (isinstance(other, TensorConstant) and
self.signature() == other.signature())
def __copy__(self):
# We need to do this to remove the cached attribute
return type(self)(self.type, self.data, self.name)
def __deepcopy__(self, memo):
# We need to do this to remove the cached attribute
return type(self)(copy.deepcopy(self.type, memo),
copy.deepcopy(self.data, memo),
copy.deepcopy(self.name, memo))
TensorType.Constant = TensorConstant
| 35.016512
| 80
| 0.579529
|
from __future__ import absolute_import, print_function, division
import collections
import copy
import traceback as tb
import warnings
import numpy
from six import integer_types
from six.moves import xrange
import theano
from theano.compat import PY3
from theano.scalar import ComplexError, IntegerDivisionError
from theano.gof import Constant, Variable
from theano.gof.utils import hashtype
from theano.tensor.utils import hash_from_ndarray
from theano.tensor.type import TensorType
from theano.configparser import config
def equal_slices(s1, s2):
return (s1.start == s2.start and
s1.stop == s2.stop and
s1.step == s2.step)
class AsTensorError(TypeError):
pass
class _tensor_py_operators(object):
def __abs__(self):
return theano.tensor.basic.abs_(self)
def __neg__(self):
return theano.tensor.basic.neg(self)
_is_nonzero = True
def __lt__(self, other):
rval = theano.tensor.basic.lt(self, other)
rval._is_nonzero = False
return rval
def __le__(self, other):
rval = theano.tensor.basic.le(self, other)
rval._is_nonzero = False
return rval
def __gt__(self, other):
rval = theano.tensor.basic.gt(self, other)
rval._is_nonzero = False
return rval
    # COMPARISON (result is a symbolic tensor; it is never usable as a bool)
    def __ge__(self, other):
        rval = theano.tensor.basic.ge(self, other)
        # Flag the result so __bool__ below rejects implicit truth-testing.
        rval._is_nonzero = False
        return rval
    def __nonzero__(self):
        # Python 2 truth-testing protocol; delegate to __bool__.
        return self.__bool__()
    def __bool__(self):
        # Symbolic variables have no runtime value, so truth-testing is only
        # allowed where _is_nonzero was explicitly left True.
        if self._is_nonzero:
            return True
        else:
            raise TypeError(
                "Variables do not support boolean operations."
            )
    # BITWISE OPERATORS (elementwise; delegate to theano.tensor.basic)
    def __invert__(self):
        return theano.tensor.basic.invert(self)
    def __and__(self, other):
        return theano.tensor.basic.and_(self, other)
    def __or__(self, other):
        return theano.tensor.basic.or_(self, other)
    def __xor__(self, other):
        return theano.tensor.basic.xor(self, other)
    def __rand__(self, other):
        return theano.tensor.basic.and_(other, self)
    def __ror__(self, other):
        return theano.tensor.basic.or_(other, self)
    def __rxor__(self, other):
        return theano.tensor.basic.xor(other, self)
    # ARITHMETIC - NORMAL (left operand)
    def __add__(self, other):
        try:
            return theano.tensor.basic.add(self, other)
        except (NotImplementedError, AsTensorError):
            # We must return NotImplemented and not an
            # NotImplementedError or raise an NotImplementedError.
            # That way python will give a good error message like this
            # `TypeError: unsupported operand type(s) for +:
            # 'TensorVariable' and 'TensorVariable'`
            return NotImplemented
    def __sub__(self, other):
        # See explanation in __add__ for the error caught
        # and the return value in that case
        try:
            return theano.tensor.basic.sub(self, other)
        except (NotImplementedError, AsTensorError):
            return NotImplemented
    def __mul__(self, other):
        # See explanation in __add__ for the error caught
        # and the return value in that case
        try:
            return theano.tensor.mul(self, other)
        except (NotImplementedError, AsTensorError):
            return NotImplemented
    def __div__(self, other):
        # See explanation in __add__ for the error caught
        # and the return value in that case
        try:
            return theano.tensor.basic.div_proxy(self, other)
        except IntegerDivisionError:
            # This is to raise the exception that occurs when trying to divide
            # two integer arrays (currently forbidden).
            raise
        except (NotImplementedError, AsTensorError):
            return NotImplemented
    if PY3:
        __truediv__ = __div__
    def __pow__(self, other):
        # See explanation in __add__ for the error caught
        # and the return value in that case
        try:
            return theano.tensor.basic.pow(self, other)
        except (NotImplementedError, AsTensorError):
            return NotImplemented
    def __mod__(self, other):
        # See explanation in __add__ for the error caught
        # and the return value in that case
        try:
            return theano.tensor.basic.mod_check(self, other)
        except ComplexError:
            # This is to raise the exception that occurs when trying to compute
            # x % y with either x or y a complex number.
            raise
        except (NotImplementedError, AsTensorError):
            return NotImplemented
    def __divmod__(self, other):
        return theano.tensor.basic.divmod(self, other)
    def __truediv__(self, other):
        return theano.tensor.basic.true_div(self, other)
    def __floordiv__(self, other):
        return theano.tensor.basic.floor_div(self, other)
    def __rtruediv__(self, other):
        return theano.tensor.basic.true_div(other, self)
    def __rfloordiv__(self, other):
        return theano.tensor.basic.floor_div(other, self)
    # DO NOT USE THESE BECAUSE INPLACE OPS SHOULD BE INSERTED
    # BY OPTIMIZATIONS ONLY
    # ARITHMETIC - INPLACE
    # def __iadd__(self, other):
    #    return _add_inplace(self, other)
    # def __isub__(self, other):
    #    return _sub_inplace(self, other)
    #
    # def __imul__(self, other):
    #    return _mul_inplace(self, other)
    #
    # def __idiv__(self, other):
    #    return _div_inplace(self, other)
    #
    # def __ipow__(self, other):
    #    return _pow_inplace(self, other)
    # ARITHMETIC - RIGHT-OPERAND (invoked when the left operand defers)
    def __radd__(self, other):
        return theano.tensor.basic.add(other, self)
    def __rsub__(self, other):
        return theano.tensor.basic.sub(other, self)
    def __rmul__(self, other):
        return theano.tensor.basic.mul(other, self)
    def __rdiv__(self, other):
        return theano.tensor.basic.div_proxy(other, self)
    def __rmod__(self, other):
        return theano.tensor.basic.mod(other, self)
    def __rdivmod__(self, other):
        return theano.tensor.basic.divmod(other, self)
    def __rpow__(self, other):
        return theano.tensor.basic.pow(other, self)
    # TRANSPOSE (numpy-style .T shorthand)
    T = property(lambda self: theano.tensor.basic.transpose(self))
def transpose(self, *axes):
if len(axes) == 0:
return theano.tensor.basic.transpose(self)
try:
iter(axes[0])
iterable = True
except TypeError:
iterable = False
if len(axes) == 1 and iterable:
return theano.tensor.basic.transpose(self, axes[0])
else:
return theano.tensor.basic.transpose(self, axes)
    # Symbolic shape vector and total element count (both graph nodes).
    shape = property(lambda self: theano.tensor.basic.shape(self))
    size = property(lambda self: self.shape[0] if self.ndim == 1 else
                    theano.tensor.basic.prod(self.shape))
    # We can't implement __len__ to provide a better error message.
    def any(self, axis=None, keepdims=False):
        return theano.tensor.basic.any(self, axis=axis, keepdims=keepdims)
    def all(self, axis=None, keepdims=False):
        return theano.tensor.basic.all(self, axis=axis, keepdims=keepdims)
    # NOTE(review): a garbled fragment of a removed __len__ override sat here
    # ('...Exception("Theano Variables can\'t work with len(Theano...')
    # — len() is intentionally unsupported on symbolic tensors; see above.
def reshape(self, shape, ndim=None):
if ndim is not None:
if not isinstance(ndim, integer_types):
raise ValueError("Expected ndim to be an integer, is " +
str(type(ndim)))
return theano.tensor.basic.reshape(self, shape, ndim=ndim)
    def dimshuffle(self, *pattern):
        """Reorder and broadcast dimensions. ``pattern`` mixes axis indices
        with 'x' markers; each 'x' inserts a broadcastable dimension. A single
        list/tuple argument is treated as the pattern itself."""
        if (len(pattern) == 1) and (isinstance(pattern[0], (list, tuple))):
            pattern = pattern[0]
        op = theano.tensor.basic.DimShuffle(list(self.type.broadcastable),
                                            pattern)
        return op(self)
    # SHAPE / LAYOUT delegators
    def flatten(self, ndim=1):
        return theano.tensor.basic.flatten(self, ndim)
    def ravel(self):
        return theano.tensor.basic.flatten(self)
    def diagonal(self, offset=0, axis1=0, axis2=1):
        return theano.tensor.basic.diagonal(self, offset, axis1, axis2)
    def transfer(self, target):
        # Move the variable to another device/backend ("target").
        return theano.tensor.transfer(self, target)
    # ELEMWISE MATH (thin delegators to theano.tensor)
    def arccos(self):
        return theano.tensor.arccos(self)
    def arccosh(self):
        return theano.tensor.arccosh(self)
    def arcsin(self):
        return theano.tensor.arcsin(self)
    def arcsinh(self):
        return theano.tensor.arcsinh(self)
    def arctan(self):
        return theano.tensor.arctan(self)
    def arctanh(self):
        return theano.tensor.arctanh(self)
    def ceil(self):
        return theano.tensor.ceil(self)
    def cos(self):
        return theano.tensor.cos(self)
    def cosh(self):
        return theano.tensor.cosh(self)
    def deg2rad(self):
        return theano.tensor.deg2rad(self)
    def exp(self):
        return theano.tensor.exp(self)
    def exp2(self):
        return theano.tensor.exp2(self)
    def expm1(self):
        return theano.tensor.expm1(self)
    def floor(self):
        return theano.tensor.floor(self)
    def log(self):
        return theano.tensor.log(self)
    def log10(self):
        return theano.tensor.log10(self)
    def log1p(self):
        return theano.tensor.log1p(self)
    def log2(self):
        return theano.tensor.log2(self)
    def rad2deg(self):
        return theano.tensor.rad2deg(self)
    def sin(self):
        return theano.tensor.sin(self)
    def sinh(self):
        return theano.tensor.sinh(self)
    def sqrt(self):
        return theano.tensor.sqrt(self)
    def tan(self):
        return theano.tensor.tan(self)
    def tanh(self):
        return theano.tensor.tanh(self)
    def trunc(self):
        return theano.tensor.trunc(self)
    def astype(self, dtype):
        # Elementwise cast to the given dtype.
        return theano.tensor.cast(self, dtype)
    def __getitem__(self, args):
        """Symbolic indexing (``x[...]``): supports basic slices, newaxis,
        Ellipsis, and numpy-style advanced indexing; boolean masks are
        rejected with a pointer to ``nonzeros()``."""
        def check_bool(args_el):
            # Recursively reject boolean masks anywhere in the index.
            try:
                if (isinstance(args_el, (numpy.bool_, bool)) or
                        args_el.dtype == 'bool'):
                    raise TypeError('TensorType does not support boolean '
                                    'mask for indexing such as tensor[x==0]. '
                                    'Instead you can use non_zeros() such as '
                                    'tensor[(x == 0).nonzeros()]. ')
            except AttributeError:
                pass
            if (not isinstance(args_el, theano.tensor.Variable) and
                    isinstance(args_el, collections.Iterable)):
                for el in args_el:
                    check_bool(el)
        check_bool(args)
        if (isinstance(args, list) and
                any([isinstance(a, slice) for a in args])):
            pass
        elif not isinstance(args, tuple):
            # Normalize a single index into a 1-tuple.
            args = args,
        # Expand a single Ellipsis into the right number of full slices.
        ellipses = [i
                    for i, index in enumerate(args)
                    if index is Ellipsis]
        if len(ellipses) > 1:
            raise IndexError(
                "an index can only have a single Ellipsis (`...`)")
        elif len(ellipses) == 1:
            new_axes = sum(1
                           for index in args
                           if index is numpy.newaxis)
            ellipsis_at = ellipses[0]
            args = list(args)
            args[ellipsis_at: ellipsis_at + 1] = (
                [slice(None)] * (self.ndim - (len(args) - 1 - new_axes)))
        # Empty list/tuple indices become empty int64 arrays (numpy semantics).
        args = tuple([numpy.array(inp, dtype=numpy.int64)
                      if(inp == [] or inp == ()) else inp for inp in args])
        args = theano.tensor.subtensor.make_constant(args)
        # Detect advanced indexing: the first index Subtensor cannot convert
        # marks the advanced axis; a second one disables the take() shortcut.
        advanced = False
        axis = None
        for i, arg in enumerate(args):
            try:
                if arg is not numpy.newaxis:
                    theano.tensor.subtensor.Subtensor.convert(arg)
            except theano.tensor.subtensor.AdvancedIndexingError:
                if advanced:
                    axis = None
                    break
                else:
                    advanced = True
                    axis = i
        if advanced:
            # Single advanced index surrounded only by full slices: use take().
            if (axis is not None and
                all(isinstance(a, slice) and
                    equal_slices(a, slice(None)) for a in args[:axis]) and
                all(isinstance(a, slice) and
                    equal_slices(a, slice(None)) for a in args[axis + 1:]) and
                isinstance(args[axis],
                           (numpy.ndarray, list,
                            TensorVariable, TensorConstant,
                            theano.tensor.sharedvar.TensorSharedVariable))):
                return self.take(args[axis], axis)
            else:
                return theano.tensor.subtensor.advanced_subtensor(self, *args)
        else:
            if numpy.newaxis in args:
                # Realize newaxis entries via dimshuffle('x'), then re-index.
                counter = 0
                pattern = []
                new_args = []
                for arg in args:
                    if arg == numpy.newaxis:
                        pattern.append('x')
                        new_args.append(slice(None, None, None))
                    else:
                        pattern.append(counter)
                        counter += 1
                        new_args.append(arg)
                view = self.dimshuffle(pattern)
                full_slices = True
                for arg in new_args:
                    # Python 2.7, this call __lt__ if we have a slice
                    # with some symbolic variable.
                    if not (isinstance(arg, slice) and
                            arg.start is None and
                            arg.stop is None and
                            arg.step is None):
                        full_slices = False
                if full_slices:
                    return view
                else:
                    return view.__getitem__(tuple(new_args))
            else:
                return theano.tensor.subtensor.Subtensor(args)(
                    self, *theano.tensor.subtensor.Subtensor.collapse(
                        args,
                        lambda entry: isinstance(entry, Variable)))
    def take(self, indices, axis=None, mode='raise'):
        return theano.tensor.subtensor.take(self, indices, axis, mode)
    # COPYING
    def copy(self, name=None):
        """Return an identity-op copy of this variable, optionally renamed."""
        copied_variable = theano.tensor.basic.tensor_copy(self)
        copied_variable.name = name
        return copied_variable
    def __iter__(self):
        # Only vectors of statically known length are iterable.
        try:
            for i in xrange(theano.tensor.basic.get_vector_length(self)):
                yield self[i]
        except TypeError:
            # This prevents accidental iteration via builtin.sum(self)
            raise TypeError(('TensorType does not support iteration. '
                             'Maybe you are using builtin.sum instead of '
                             'theano.tensor.sum? (Maybe .max?)'))
    # CONVENIENT ACCESS TO TYPE PROPERTIES
    ndim = property(lambda self: self.type.ndim)
    broadcastable = property(lambda self: self.type.broadcastable)
    dtype = property(lambda self: self.type.dtype)
    # extra pseudo-operator symbols
    def __dot__(left, right):
        return theano.tensor.basic.dot(left, right)
    def __rdot__(right, left):
        return theano.tensor.basic.dot(left, right)
    dot = __dot__
    # REDUCTIONS (delegate to theano.tensor.basic)
    def sum(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
        return theano.tensor.basic.sum(self, axis=axis,
                                       dtype=dtype, keepdims=keepdims,
                                       acc_dtype=acc_dtype)
    def prod(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
        return theano.tensor.basic.prod(self, axis=axis,
                                        dtype=dtype, keepdims=keepdims,
                                        acc_dtype=acc_dtype)
    def norm(self, L, axis=None, keepdims=False):
        """L-norm along ``axis``: (sum(|x|**L))**(1/L). L=0 and L=inf are
        not implemented."""
        if L == 0:
            raise NotImplementedError()
        if numpy.isinf(L):
            raise NotImplementedError()
        # optimizations will/should catch cases like L=1, L=2
        y = theano.tensor.basic.pow(
            theano.tensor.basic.pow(
                theano.tensor.basic.abs_(self), L).sum(axis=axis), 1.0 / L)
        if keepdims:
            return theano.tensor.basic.makeKeepDims(self, y, axis)
        else:
            return y
    def mean(self, axis=None, dtype=None, keepdims=False, acc_dtype=None):
        return theano.tensor.basic.mean(self, axis=axis,
                                        dtype=dtype, keepdims=keepdims,
                                        acc_dtype=acc_dtype)
    def var(self, axis=None, ddof=0, keepdims=False, corrected=False):
        return theano.tensor.basic.var(self, axis=axis, ddof=ddof,
                                       keepdims=keepdims, corrected=corrected)
    def std(self, axis=None, ddof=0, keepdims=False, corrected=False):
        return theano.tensor.basic.std(self, axis=axis, ddof=ddof,
                                       keepdims=keepdims, corrected=corrected)
    def min(self, axis=None, keepdims=False):
        return theano.tensor.basic.min(self, axis, keepdims=keepdims)
    def max(self, axis=None, keepdims=False):
        return theano.tensor.basic.max(self, axis, keepdims=keepdims)
    def argmin(self, axis=None, keepdims=False):
        return theano.tensor.basic.argmin(self, axis, keepdims=keepdims)
    def argmax(self, axis=None, keepdims=False):
        return theano.tensor.basic.argmax(self, axis, keepdims=keepdims)
    def nonzero(self, return_matrix=False):
        return theano.tensor.basic.nonzero(self, return_matrix=return_matrix)
    def nonzero_values(self):
        return theano.tensor.basic.nonzero_values(self)
    def sort(self, axis=-1, kind='quicksort', order=None):
        return theano.tensor.sort(self, axis, kind, order)
    def argsort(self, axis=-1, kind='quicksort', order=None):
        return theano.tensor.argsort(self, axis, kind, order)
    def clip(self, a_min, a_max):
        return theano.tensor.basic.clip(self, a_min, a_max)
    def conj(self):
        return theano.tensor.basic.conj(self)
    conjugate = conj
    def repeat(self, repeats, axis=None):
        return theano.tensor.extra_ops.repeat(self, repeats, axis)
    def round(self, mode=None):
        return theano.tensor.basic.round(self, mode)
    def trace(self):
        return theano.tensor.nlinalg.trace(self)
    # TO TRUMP NUMPY OPERATORS: make numpy defer mixed ops to this class.
    __array_priority__ = 1000
    def get_scalar_constant_value(self):
        return theano.tensor.basic.get_scalar_constant_value(self)
    def zeros_like(model, dtype=None):
        return theano.tensor.basic.zeros_like(model, dtype=dtype)
    def ones_like(model, dtype=None):
        return theano.tensor.basic.ones_like(model, dtype=dtype)
    def cumsum(self, axis=None):
        return theano.tensor.extra_ops.cumsum(self, axis)
    def cumprod(self, axis=None):
        return theano.tensor.extra_ops.cumprod(self, axis)
    def searchsorted(self, v, side='left', sorter=None):
        return theano.tensor.extra_ops.searchsorted(self, v, side, sorter)
    def ptp(self, axis=None):
        return theano.tensor.ptp(self, axis)
    def swapaxes(self, axis1, axis2):
        return theano.tensor.basic.swapaxes(self, axis1, axis2)
    def fill(self, value):
        return theano.tensor.basic.fill(self, value)
def choose(self, a, choices, out=None, mode='raise'):
return theano.tensor.basic.choose(self, a, choices, out=None,
mode='raise')
    def squeeze(self):
        # Remove broadcastable (length-1) dimensions.
        return theano.tensor.extra_ops.squeeze(self)
    def compress(self, a, axis=None):
        # Select elements of self where condition `a` holds, along `axis`.
        return theano.tensor.extra_ops.compress(self, a, axis=axis)
class TensorVariable(_tensor_py_operators, Variable):
    """A :class:`Variable` whose type is a :class:`TensorType`.

    On construction, the Theano flag ``warn_float64`` can trigger a warning,
    an exception, or a pdb break when a float64 variable is created, to help
    track down unintended float64 upcasts.
    """
    def __init__(self, type, owner=None, index=None, name=None):
        super(TensorVariable, self).__init__(type, owner=owner,
                                             index=index, name=name)
        if (config.warn_float64 != 'ignore' and type.dtype == 'float64'):
            msg = ('You are creating a TensorVariable '
                   'with float64 dtype. You requested an action via '
                   'the Theano flag warn_float64={ignore,warn,raise,pdb}.')
            if config.warn_float64 == "warn":
                # Get the user stack. We don't want frames from inside the
                # theano.tensor / theano.gof subpackages to be shown.
                x = tb.extract_stack()
                nb_rm = 0
                while x:
                    file_path = x[-1][0]
                    rm = False
                    # Bug fix: the original list repeated "theano\\tensor\\"
                    # where the Windows form of the gof path belonged, so gof
                    # frames were never stripped on Windows.
                    for p in ["theano/tensor/", "theano\\tensor\\",
                              "theano/gof/", "theano\\gof\\"]:
                        if p in file_path:
                            x = x[:-1]
                            nb_rm += 1
                            rm = True
                            break
                    if not rm:
                        break
                warnings.warn(msg, stacklevel=1 + nb_rm)
            elif config.warn_float64 == "raise":
                raise Exception(msg)
            elif config.warn_float64 == 'pdb':
                import pdb
                pdb.set_trace()
TensorType.Variable = TensorVariable
class TensorConstantSignature(tuple):
    """A ``(type, data)`` pair used to hash and compare TensorConstants.

    Equality requires matching types and shapes, then elementwise-equal data,
    with NaN handled positionally: two constants are equal only if their NaNs
    occur at the same positions and the remaining values match.
    """
    def __eq__(self, other):
        if type(self) != type(other):
            return False
        try:
            (t0, d0), (t1, d1) = self, other
        except Exception:
            return False
        if t0 != t1 or d0.shape != d1.shape:
            return False
        # Force computation of the NaN mask (cached attributes) before use.
        self.no_nan
        if self.has_nan:
            other.no_nan
            # Both must have NaNs at identical positions; compare the rest.
            return (other.has_nan and
                    self.sum == other.sum and
                    (self.no_nan.mask == other.no_nan.mask).all() and
                    # Note that the second test below (==) may crash e.g. for
                    # a single scalar NaN value, so we do not run it when all
                    # values are missing.
                    (self.no_nan.mask.all() or
                     (self.no_nan == other.no_nan).all()))
        else:
            return (self.sum == other.sum) and numpy.all(d0 == d1)
    def __hash__(self):
        t, d = self
        return hashtype(self) ^ hash(t) ^ hash(d.shape) ^ hash(self.sum)
    def theano_hash(self):
        # Content-based hash of the underlying ndarray.
        _, d = self
        return hash_from_ndarray(d)
    def _get_sum(self):
        """Compute sum of non NaN / Inf values in the array (cached)."""
        try:
            return self._sum
        except AttributeError:
            self._sum = self.no_nan.sum()
            if type(self._sum) is numpy.memmap:
                # memmap sums stay memmaps; convert to a plain scalar.
                self._sum = numpy.asarray(self._sum).item()
            if self.has_nan and self.no_nan.mask.all():
                # In this case the sum is not properly computed by numpy.
                self._sum = 0
            if numpy.isinf(self._sum) or numpy.isnan(self._sum):
                # NaN may happen when there are both -inf and +inf values.
                if self.has_nan:
                    # Filter both NaN and Inf values.
                    mask = self.no_nan.mask + numpy.isinf(self[1])
                else:
                    # Filter only Inf values.
                    mask = numpy.isinf(self[1])
                if mask.all():
                    self._sum = 0
                else:
                    self._sum = numpy.ma.masked_array(self[1], mask).sum()
                # At this point there should be no more NaN.
                assert not numpy.isnan(self._sum)
        return self._sum
    sum = property(_get_sum)
    def _get_no_nan(self):
        # Cached masked view of the data with NaNs masked out; also sets
        # self.has_nan as a side effect.
        try:
            return self._no_nan
        except AttributeError:
            nan_mask = numpy.isnan(self[1])
            if nan_mask.any():
                self._no_nan = numpy.ma.masked_array(self[1], nan_mask)
                self.has_nan = True
            else:
                self._no_nan = self[1]
                self.has_nan = False
        return self._no_nan
    no_nan = property(_get_no_nan)
class TensorConstant(_tensor_py_operators, Constant):
    """Subclass to add the tensor operators to the basic `Constant` class.

    Caches, in ``tag.unique_value``, the single repeated value when every
    element of the data is identical (used for a compact ``__str__`` and by
    optimizations elsewhere).
    """
    def __init__(self, type, data, name=None):
        Constant.__init__(self, type, data, name)
        self.tag.unique_value = None
        if isinstance(data, numpy.ndarray) and data.ndim > 0:
            flat_data = data.ravel()
            if flat_data.shape[0]:
                if (flat_data == flat_data[0]).all():
                    self.tag.unique_value = flat_data[0]
    def __str__(self):
        if self.tag.unique_value is not None:
            name = "%s of %s" % (str(self.data.shape),
                                 str(self.tag.unique_value))
        else:
            name = "%s" % self.data
        if len(name) > 20:
            # Truncate long reprs to keep graph printouts readable.
            name = name[:10] + ".." + name[-10:]
        return "TensorConstant{%s}" % name
    def signature(self):
        return TensorConstantSignature((self.type, self.data))
    def equals(self, other):
        # Override Constant.equals to allow comparison with
        # numpy.ndarray and plain python scalars.
        if isinstance(other, (numpy.ndarray, int, float)):
            # Make a TensorConstant to be able to compare
            other = theano.tensor.basic.constant(other)
        return (isinstance(other, TensorConstant) and
                self.signature() == other.signature())
    def __copy__(self):
        # We need to do this to remove the cached attribute
        return type(self)(self.type, self.data, self.name)
    def __deepcopy__(self, memo):
        # We need to do this to remove the cached attribute
        return type(self)(copy.deepcopy(self.type, memo),
                          copy.deepcopy(self.data, memo),
                          copy.deepcopy(self.name, memo))
TensorType.Constant = TensorConstant
| true
| true
|
1c47fc4c54f24031a9b208b6e98961d9867c8432
| 5,229
|
py
|
Python
|
desktop/libs/indexer/src/indexer/conf.py
|
10088/hue
|
802811941dabd015a4fd7a640d349f9d26ac5572
|
[
"Apache-2.0"
] | null | null | null |
desktop/libs/indexer/src/indexer/conf.py
|
10088/hue
|
802811941dabd015a4fd7a640d349f9d26ac5572
|
[
"Apache-2.0"
] | null | null | null |
desktop/libs/indexer/src/indexer/conf.py
|
10088/hue
|
802811941dabd015a4fd7a640d349f9d26ac5572
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from future import standard_library
standard_library.install_aliases()
import logging
import os
import sys
from desktop.lib.conf import Config, coerce_bool
from libsolr import conf as libsolr_conf
from libzookeeper import conf as libzookeeper_conf
if sys.version_info[0] > 2:
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _t
else:
from urlparse import urlparse
from django.utils.translation import ugettext_lazy as _t
LOG = logging.getLogger(__name__)
# Deprecated. Should be automatically guessed from Solr admin info API now.
def get_solr_ensemble():
  """Deprecated: build the Solr ZooKeeper connection string.

  Concatenates the configured ZooKeeper ensemble with the Solr chroot path.
  """
  ensemble = libzookeeper_conf.ENSEMBLE.get()
  chroot = libsolr_conf.SOLR_ZK_PATH.get()
  return '%s%s' % (ensemble, chroot)
def solrctl():
  """Return the path of the first `solrctl` binary found on PATH, or None."""
  search_dirs = os.environ.get('PATH', '').split(os.path.pathsep)
  for directory in search_dirs:
    candidate = os.path.join(directory, 'solrctl')
    if os.path.exists(candidate):
      return candidate
  return None
def zkensemble():
  """Guess a ZooKeeper ensemble connection string for Solr.

  Tries the configured ZooKeeper cluster first, then falls back to deriving
  the host from the Solr URL. Returns None when neither source is usable.

  Fix: the bare ``except:`` clauses (which also swallowed KeyboardInterrupt /
  SystemExit) are narrowed to ``except Exception:``, and the fall-through
  None return is made explicit.
  """
  try:
    from zookeeper.conf import CLUSTERS
    clusters = CLUSTERS.get()
    if clusters['default'].HOST_PORTS.get() != 'localhost:2181':
      return '%s/solr' % clusters['default'].HOST_PORTS.get()
  except Exception:
    # ZooKeeper app may be absent or unconfigured; fall through to Solr URL.
    LOG.warning('Failed to get Zookeeper ensemble')
  try:
    from search.conf import SOLR_URL
    parsed = urlparse(SOLR_URL.get())
    return "%s:2181/solr" % (parsed.hostname or 'localhost')
  except Exception:
    LOG.warning('Failed to get Solr url')
  return None
# Deprecated as always on
ENABLE_NEW_INDEXER = Config(
key="enable_new_indexer",
help=_t("Flag to turn on the new Solr indexer."),
type=coerce_bool,
default=True
)
ENABLE_SCALABLE_INDEXER = Config(
key="enable_scalable_indexer",
help=_t("Flag to turn on the Morphline Solr indexer."),
type=coerce_bool,
default=True
)
CONFIG_INDEXER_LIBS_PATH = Config(
key="config_indexer_libs_path",
help=_t("Filesystem directory containing Solr Morphline indexing libs."),
type=str,
default='/tmp/smart_indexer_lib'
)
CONFIG_JDBC_LIBS_PATH = Config(
key="config_jdbc_libs_path",
help=_t("Filesystem directory containing JDBC libs."),
type=str,
default='/user/oozie/libext/jdbc_drivers'
)
CONFIG_JARS_LIBS_PATH = Config(
key="config_jars_libs_path",
help=_t("Filesystem directory containing jars libs."),
type=str,
default='/user/oozie/libext/libs'
)
ENABLE_SQOOP = Config(
key="enable_sqoop",
help=_t("Flag to turn on Sqoop imports."),
type=coerce_bool,
default=True
)
ENABLE_KAFKA = Config(
key="enable_kafka",
help=_t("Flag to turn on Kafka imports."),
type=coerce_bool,
default=False
)
ENABLE_FIELD_EDITOR = Config(
key="enable_field_editor",
help=_t("Flag to turn on the SQL/Morphline field editor."),
type=coerce_bool,
default=False
)
ENABLE_ENVELOPE = Config(
key="enable_envelope",
help=_t("Flag to turn on Envelope based jobs."),
type=coerce_bool,
default=False
)
ENABLE_ALTUS = Config(
key="enable_altus",
help=_t("Flag to turn on Altus imports."),
type=coerce_bool,
default=False
)
ENABLE_DIRECT_UPLOAD = Config(
key="enable_direct_upload",
help=_t("Flag to turn on the direct upload of a small file."),
type=coerce_bool,
default=True
)
# Unused
BATCH_INDEXER_PATH = Config(
key="batch_indexer_path",
help=_t("Batch indexer path in HDFS."),
type=str,
default="/var/lib/search/search-mr-job.jar")
CORE_INSTANCE_DIR = Config(
key="core_instance_dir",
help=_t("Local path to Hue folder where Solr instance directories will be created in non-solrcloud mode."),
type=str,
default=os.path.join(os.path.dirname(__file__), '../data/collections'))
CONFIG_TEMPLATE_PATH = Config(
key="config_template_path",
help=_t("Default template used at collection creation."),
type=str,
default=os.path.join(os.path.dirname(__file__), '..', 'data', 'solrconfigs'))
CONFIG_INDEXING_TEMPLATES_PATH = Config(
key="config_oozie_workspace_path",
help=_t("oozie workspace template for indexing:"),
type=str,
default=os.path.join(os.path.dirname(__file__), '..', 'data', 'oozie_workspace')
)
def config_morphline_path():
  """Local filesystem path of the bundled Morphline data directory."""
  here = os.path.dirname(__file__)
  return os.path.join(here, '..', 'data', 'morphline')
# Unused
SOLRCTL_PATH = Config(
key="solrctl_path",
help=_t("Location of the solrctl binary."),
type=str,
dynamic_default=solrctl)
# Deprecated and not used anymore
SOLR_ZK_ENSEMBLE = Config(
key="solr_zk_ensemble",
help=_t("Zookeeper ensemble."),
type=str,
dynamic_default=zkensemble)
| 26.409091
| 109
| 0.733601
|
from future import standard_library
standard_library.install_aliases()
import logging
import os
import sys
from desktop.lib.conf import Config, coerce_bool
from libsolr import conf as libsolr_conf
from libzookeeper import conf as libzookeeper_conf
if sys.version_info[0] > 2:
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _t
else:
from urlparse import urlparse
from django.utils.translation import ugettext_lazy as _t
LOG = logging.getLogger(__name__)
def get_solr_ensemble():
return '%s%s' % (libzookeeper_conf.ENSEMBLE.get(), libsolr_conf.SOLR_ZK_PATH.get())
def solrctl():
for dirname in os.environ.get('PATH', '').split(os.path.pathsep):
path = os.path.join(dirname, 'solrctl')
if os.path.exists(path):
return path
return None
def zkensemble():
try:
from zookeeper.conf import CLUSTERS
clusters = CLUSTERS.get()
if clusters['default'].HOST_PORTS.get() != 'localhost:2181':
return '%s/solr' % clusters['default'].HOST_PORTS.get()
except:
LOG.warning('Failed to get Zookeeper ensemble')
try:
from search.conf import SOLR_URL
parsed = urlparse(SOLR_URL.get())
return "%s:2181/solr" % (parsed.hostname or 'localhost')
except:
LOG.warning('Failed to get Solr url')
ENABLE_NEW_INDEXER = Config(
key="enable_new_indexer",
help=_t("Flag to turn on the new Solr indexer."),
type=coerce_bool,
default=True
)
ENABLE_SCALABLE_INDEXER = Config(
key="enable_scalable_indexer",
help=_t("Flag to turn on the Morphline Solr indexer."),
type=coerce_bool,
default=True
)
CONFIG_INDEXER_LIBS_PATH = Config(
key="config_indexer_libs_path",
help=_t("Filesystem directory containing Solr Morphline indexing libs."),
type=str,
default='/tmp/smart_indexer_lib'
)
CONFIG_JDBC_LIBS_PATH = Config(
key="config_jdbc_libs_path",
help=_t("Filesystem directory containing JDBC libs."),
type=str,
default='/user/oozie/libext/jdbc_drivers'
)
CONFIG_JARS_LIBS_PATH = Config(
key="config_jars_libs_path",
help=_t("Filesystem directory containing jars libs."),
type=str,
default='/user/oozie/libext/libs'
)
ENABLE_SQOOP = Config(
key="enable_sqoop",
help=_t("Flag to turn on Sqoop imports."),
type=coerce_bool,
default=True
)
ENABLE_KAFKA = Config(
key="enable_kafka",
help=_t("Flag to turn on Kafka imports."),
type=coerce_bool,
default=False
)
ENABLE_FIELD_EDITOR = Config(
key="enable_field_editor",
help=_t("Flag to turn on the SQL/Morphline field editor."),
type=coerce_bool,
default=False
)
ENABLE_ENVELOPE = Config(
key="enable_envelope",
help=_t("Flag to turn on Envelope based jobs."),
type=coerce_bool,
default=False
)
ENABLE_ALTUS = Config(
key="enable_altus",
help=_t("Flag to turn on Altus imports."),
type=coerce_bool,
default=False
)
ENABLE_DIRECT_UPLOAD = Config(
key="enable_direct_upload",
help=_t("Flag to turn on the direct upload of a small file."),
type=coerce_bool,
default=True
)
BATCH_INDEXER_PATH = Config(
key="batch_indexer_path",
help=_t("Batch indexer path in HDFS."),
type=str,
default="/var/lib/search/search-mr-job.jar")
CORE_INSTANCE_DIR = Config(
key="core_instance_dir",
help=_t("Local path to Hue folder where Solr instance directories will be created in non-solrcloud mode."),
type=str,
default=os.path.join(os.path.dirname(__file__), '../data/collections'))
CONFIG_TEMPLATE_PATH = Config(
key="config_template_path",
help=_t("Default template used at collection creation."),
type=str,
default=os.path.join(os.path.dirname(__file__), '..', 'data', 'solrconfigs'))
CONFIG_INDEXING_TEMPLATES_PATH = Config(
key="config_oozie_workspace_path",
help=_t("oozie workspace template for indexing:"),
type=str,
default=os.path.join(os.path.dirname(__file__), '..', 'data', 'oozie_workspace')
)
def config_morphline_path():
return os.path.join(os.path.dirname(__file__), '..', 'data', 'morphline')
SOLRCTL_PATH = Config(
key="solrctl_path",
help=_t("Location of the solrctl binary."),
type=str,
dynamic_default=solrctl)
SOLR_ZK_ENSEMBLE = Config(
key="solr_zk_ensemble",
help=_t("Zookeeper ensemble."),
type=str,
dynamic_default=zkensemble)
| true
| true
|
1c47fe26c578435ee60bd4af2a50e86627e1777a
| 956
|
py
|
Python
|
books/model/UserList.py
|
nudglabs/books-python-wrappers
|
8844eca8fe681542644a70749b72a6dc4e48c171
|
[
"MIT"
] | 9
|
2015-04-01T08:59:49.000Z
|
2022-01-27T01:27:45.000Z
|
books/model/UserList.py
|
nudglabs/books-python-wrappers
|
8844eca8fe681542644a70749b72a6dc4e48c171
|
[
"MIT"
] | 3
|
2020-05-14T04:22:22.000Z
|
2021-08-06T11:19:03.000Z
|
books/model/UserList.py
|
nudglabs/books-python-wrappers
|
8844eca8fe681542644a70749b72a6dc4e48c171
|
[
"MIT"
] | 11
|
2016-04-14T10:59:36.000Z
|
2020-08-19T13:26:05.000Z
|
#$Id$
from books.model.PageContext import PageContext
class UserList:
    """Container pairing a list of user objects with pagination info."""

    def __init__(self):
        """Start with an empty user list and a fresh page context."""
        self.users = []
        self.page_context = PageContext()

    def set_users(self, user):
        """Append a user object to the list.

        Args:
            user(instance): User object.
        """
        self.users.append(user)

    def get_users(self):
        """Return the accumulated list of user objects."""
        return self.users

    def set_page_context(self, page_context):
        """Replace the page context.

        Args:
            page_context(instance): Page context object.
        """
        self.page_context = page_context

    def get_page_context(self):
        """Return the page context object."""
        return self.page_context
| 19.916667
| 61
| 0.560669
|
from books.model.PageContext import PageContext
class UserList:
def __init__(self):
self.users = []
self.page_context = PageContext()
def set_users(self, user):
self.users.append(user)
def get_users(self):
return self.users
def set_page_context(self, page_context):
self.page_context = page_context
def get_page_context(self):
return self.page_context
| true
| true
|
1c47ff9b3f92607d61112ac9b4852bb244a3b137
| 2,539
|
py
|
Python
|
env/Lib/site-packages/algorithmia_api_client/models/language.py
|
Vivek-Kamboj/Sargam
|
0f3ca5c70ddb722dd40a45373abd0e9b3939064e
|
[
"MIT"
] | 2
|
2020-05-20T23:10:31.000Z
|
2020-12-09T13:00:06.000Z
|
env/Lib/site-packages/algorithmia_api_client/models/language.py
|
Vivek-Kamboj/Sargam
|
0f3ca5c70ddb722dd40a45373abd0e9b3939064e
|
[
"MIT"
] | 5
|
2021-04-25T08:16:09.000Z
|
2022-03-12T00:42:14.000Z
|
env/Lib/site-packages/algorithmia_api_client/models/language.py
|
Vivek-Kamboj/Sargam
|
0f3ca5c70ddb722dd40a45373abd0e9b3939064e
|
[
"MIT"
] | 1
|
2021-10-01T14:32:25.000Z
|
2021-10-01T14:32:25.000Z
|
# coding: utf-8
"""
Algorithmia Management APIs
APIs for managing actions on the Algorithmia platform # noqa: E501
OpenAPI spec version: 1.0.1
Contact: support@algorithmia.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Language(object):
    """Auto-generated OpenAPI model enumerating supported algorithm languages.

    NOTE: generated by OpenAPI Generator (https://openapi-generator.tech);
    regenerate rather than hand-edit.
    """

    # Allowed enum values.
    JAVA = "java"
    JAVASCRIPT = "javascript"
    PYTHON2_LANGPACK = "python2-langpack"
    PYTHON3_1 = "python3-1"
    R = "r"
    RUBY = "ruby"
    RUST = "rust"
    SCALA = "scala"

    # Attribute name -> attribute type.
    openapi_types = {}
    # Attribute name -> JSON key in the API definition.
    attribute_map = {}

    def __init__(self):
        """Language - a model defined in OpenAPI."""
        self.discriminator = None

    def to_dict(self):
        """Return the model properties as a dict (recursing into sub-models)."""
        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pprint-formatted string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Models are equal when both are Language with matching attributes."""
        return isinstance(other, Language) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| 25.908163
| 74
| 0.550217
|
import pprint
import re
import six
class Language(object):
JAVA = "java"
JAVASCRIPT = "javascript"
PYTHON2_LANGPACK = "python2-langpack"
PYTHON3_1 = "python3-1"
R = "r"
RUBY = "ruby"
RUST = "rust"
SCALA = "scala"
openapi_types = {
}
attribute_map = {
}
def __init__(self):
self.discriminator = None
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, Language):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
1c4800c6ae1522e0be0378191ffb0de791b183ba
| 541
|
py
|
Python
|
api/serializers.py
|
coder-chris-git/django-react-love_backend
|
11b7366f6c3c4d9d3b72b145e100b26305c52128
|
[
"MIT"
] | null | null | null |
api/serializers.py
|
coder-chris-git/django-react-love_backend
|
11b7366f6c3c4d9d3b72b145e100b26305c52128
|
[
"MIT"
] | null | null | null |
api/serializers.py
|
coder-chris-git/django-react-love_backend
|
11b7366f6c3c4d9d3b72b145e100b26305c52128
|
[
"MIT"
] | null | null | null |
from django.db.models.base import Model
from rest_framework.fields import IntegerField
from rest_framework.relations import StringRelatedField
from .models import BoastAndRoastModel
from rest_framework.serializers import ModelSerializer,SlugRelatedField
class BoastAndRoastSerializer(ModelSerializer):
    """DRF serializer for BoastAndRoastModel posts, exposing vote tallies."""
    # NOTE(review): StringRelatedField renders these read-only via str();
    # on datetime fields this yields their string form — confirm intent.
    date_created = StringRelatedField()
    last_updated = StringRelatedField()
    class Meta:
        model = BoastAndRoastModel
        fields = ['id','post_type','body','upvote','downvote','date_created','last_updated','total_votes']
| 28.473684
| 102
| 0.796673
|
from django.db.models.base import Model
from rest_framework.fields import IntegerField
from rest_framework.relations import StringRelatedField
from .models import BoastAndRoastModel
from rest_framework.serializers import ModelSerializer,SlugRelatedField
class BoastAndRoastSerializer(ModelSerializer):
date_created = StringRelatedField()
last_updated = StringRelatedField()
class Meta:
model = BoastAndRoastModel
fields = ['id','post_type','body','upvote','downvote','date_created','last_updated','total_votes']
| true
| true
|
1c4801010c3d4e1174d971d52ec35e804cc3f383
| 7,567
|
py
|
Python
|
conf.py
|
Mozilla-GitHub-Standards/7c3e4fe1e3e9e45496cb7bc94f1ff8b3ec66eca2e358a0c7231317e53ec1f6bc
|
057a68a7c76270de37df5e1829af2be047852d01
|
[
"CC-BY-4.0"
] | 4
|
2016-01-10T19:09:11.000Z
|
2019-10-01T16:24:33.000Z
|
conf.py
|
Mozilla-GitHub-Standards/7c3e4fe1e3e9e45496cb7bc94f1ff8b3ec66eca2e358a0c7231317e53ec1f6bc
|
057a68a7c76270de37df5e1829af2be047852d01
|
[
"CC-BY-4.0"
] | 13
|
2015-01-02T19:26:04.000Z
|
2019-03-29T12:35:23.000Z
|
conf.py
|
Mozilla-GitHub-Standards/7c3e4fe1e3e9e45496cb7bc94f1ff8b3ec66eca2e358a0c7231317e53ec1f6bc
|
057a68a7c76270de37df5e1829af2be047852d01
|
[
"CC-BY-4.0"
] | 16
|
2015-01-01T16:32:37.000Z
|
2020-08-18T19:21:41.000Z
|
# -*- coding: utf-8 -*-
#
# A-team Bootcamp documentation build configuration file, created by
# sphinx-quickstart on Thu May 5 14:21:14 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
# Read the Docs sets READTHEDOCS=True in its build environment and applies
# its own theme there, so sphinx_rtd_theme is configured only for local builds.
on_rtd = os.environ.get('READTHEDOCS') == 'True'
if not on_rtd:
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# intersphinx cross-links to other projects' docs; todo enables .. todo:: support.
extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'A-Team Bootcamp'
copyright = u'Mozilla. This work is licensed under a Creative Commons Attribution 4.0 International License'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3beta'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'README.rst']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# Render .. todo:: and .. todolist:: entries in the built docs (sphinx.ext.todo).
todo_include_todos = True
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Base name of the output file produced by the HTML Help builder.
htmlhelp_basename = 'AteamBootcampdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'AteamBootcamp.tex', u'A-Team Bootcamp Documentation',
     # '\\&' yields the same backslash-ampersand LaTeX escape the original
     # '\&' produced, without relying on an invalid Python escape sequence
     # (DeprecationWarning since 3.6, SyntaxWarning in 3.12+).
     u'Mozilla Automation \\& Tools', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'ateambootcamp', u'A-team Bootcamp Documentation',
     # '\\&' replaces the invalid escape '\&' (runtime string is identical;
     # avoids DeprecationWarning/SyntaxWarning and a future SyntaxError).
     [u'Mozilla Automation \\& Tools'], 1)
]
# Cross-reference targets for sphinx.ext.intersphinx:
# name -> (objects.inv base URL, None = default inventory location).
intersphinx_mapping = {
    'playdoh': ('https://playdoh.readthedocs.io/en/latest/', None),
}
| 33.188596
| 108
| 0.722743
|
import os
on_rtd = os.environ.get('READTHEDOCS') == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'A-Team Bootcamp'
copyright = u'Mozilla. This work is licensed under a Creative Commons Attribution 4.0 International License'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3beta'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'README.rst']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# Show todo items
todo_include_todos = True
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'AteamBootcampdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'AteamBootcamp.tex', u'A-Team Bootcamp Documentation',
u'Mozilla Automation \& Tools', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ateambootcamp', u'A-team Bootcamp Documentation',
[u'Mozilla Automation \& Tools'], 1)
]
intersphinx_mapping = dict(
playdoh=('https://playdoh.readthedocs.io/en/latest/', None)
)
| true
| true
|
1c48015d7ae84e2a3dd6ece846052a7d5758efc6
| 2,174
|
py
|
Python
|
chapter03/python/item_cf.py
|
coco-in-bluemoon/building-recommendation-engines
|
b337b2ba75b6c9b08612ab1720a2858e64e9de09
|
[
"MIT"
] | null | null | null |
chapter03/python/item_cf.py
|
coco-in-bluemoon/building-recommendation-engines
|
b337b2ba75b6c9b08612ab1720a2858e64e9de09
|
[
"MIT"
] | null | null | null |
chapter03/python/item_cf.py
|
coco-in-bluemoon/building-recommendation-engines
|
b337b2ba75b6c9b08612ab1720a2858e64e9de09
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
# 1. load dataset: long-format rows (critic, title, rating) pivoted into a
# title x critic matrix; titles a critic has not rated become NaN.
ratings = pd.read_csv('chapter02/data/movie_rating.csv')
movie_ratings = pd.pivot_table(
    ratings,
    values='rating',
    index='title',
    columns='critic'
)
# 2. calculate similarity
def calcualte_norm(u):
    """Return the Euclidean norm of *u*, treating NaN entries as absent.

    The name keeps its original (misspelled) form because other code in
    this module calls it by this name.

    Replaces the hand-rolled accumulation loop with the equivalent NaN-aware
    NumPy reduction: nansum counts NaN as 0, exactly matching the original
    loop that skipped NaN components, so an empty or all-NaN input gives 0.0.
    """
    values = np.asarray(u, dtype=float)
    return np.sqrt(np.nansum(np.square(values)))
def calculate_cosine_similarity(u, v):
    """Cosine similarity between rating vectors *u* and *v*.

    The dot product only includes positions rated in both vectors (neither
    component NaN); the norms are computed per-vector via calcualte_norm,
    which is itself NaN-aware. Division is not guarded: a zero denominator
    propagates as NaN, exactly as before.
    """
    denominator = calcualte_norm(u) * calcualte_norm(v)
    # Sum co-rated products only.
    numerator = sum(
        ui * vi
        for ui, vi in zip(u, v)
        if not (np.isnan(ui) or np.isnan(vi))
    )
    return numerator / denominator
# Pairwise item-item cosine similarity over every title pair (symmetric,
# diagonal 1.0 where a title has any ratings).
titles = movie_ratings.index
sim_items = pd.DataFrame(0, columns=titles, index=titles, dtype=float)
for src in titles:
    for dst in titles:
        # Compare the two titles' rating vectors across all critics.
        src_vec = movie_ratings.loc[src, :].values
        dst_vec = movie_ratings.loc[dst, :].values
        similarity = calculate_cosine_similarity(src_vec, dst_vec)
        sim_items.loc[src, dst] = similarity
print(sim_items)
# 3. Make Prediction & Recommendation: for one critic (column index user_id),
# predict their rating of each unrated title as the similarity-weighted
# average of the titles they did rate.
user_id = 5
ratings_critic = movie_ratings.loc[:, [movie_ratings.columns[user_id]]]
ratings_critic.columns = ['rating']
# Titles this critic has not rated (prediction targets).
titles_na_critic = ratings_critic[pd.isna(ratings_critic.rating)].index
ratings_t = ratings.loc[ratings.critic == movie_ratings.columns[user_id]]
ratings_t = ratings_t.reset_index(drop=True)
# Attach, to each rated title, its similarity to every unrated title.
x = sim_items.loc[:, titles_na_critic]
ratings_t = pd.merge(ratings_t, x, on='title')
print(ratings_t)
# Flatten to (target title, observed rating, similarity) triples.
result_dict = {'title': list(), 'rating': list(), 'similarity': list()}
for row in ratings_t.iterrows():
    for title in titles_na_critic:
        result_dict['title'].append(title)
        result_dict['rating'].append(row[1]['rating'])
        result_dict['similarity'].append(row[1][title])
result = pd.DataFrame(result_dict)
# prediction = sum(rating * similarity) / sum(similarity) per target title.
result.loc[:, 'sim_rating'] = result.rating * result.similarity
result = result.groupby('title').sum()
result.loc[:, 'prediction'] = result.sim_rating / result.similarity
result = result.drop(columns=['rating', 'similarity', 'sim_rating'])
print(result)
| 26.839506
| 73
| 0.684913
|
import numpy as np
import pandas as pd
ratings = pd.read_csv('chapter02/data/movie_rating.csv')
movie_ratings = pd.pivot_table(
ratings,
values='rating',
index='title',
columns='critic'
)
def calcualte_norm(u):
norm_u = 0.0
for ui in u:
if np.isnan(ui):
continue
norm_u += (ui ** 2)
return np.sqrt(norm_u)
def calculate_cosine_similarity(u, v):
norm_u = calcualte_norm(u)
norm_v = calcualte_norm(v)
denominator = norm_u * norm_v
numerator = 0.0
for ui, vi in zip(u, v):
if np.isnan(ui) or np.isnan(vi):
continue
numerator += (ui * vi)
similarity = numerator / denominator
return similarity
titles = movie_ratings.index
sim_items = pd.DataFrame(0, columns=titles, index=titles, dtype=float)
for src in titles:
for dst in titles:
src_vec = movie_ratings.loc[src, :].values
dst_vec = movie_ratings.loc[dst, :].values
similarity = calculate_cosine_similarity(src_vec, dst_vec)
sim_items.loc[src, dst] = similarity
print(sim_items)
user_id = 5
ratings_critic = movie_ratings.loc[:, [movie_ratings.columns[user_id]]]
ratings_critic.columns = ['rating']
titles_na_critic = ratings_critic[pd.isna(ratings_critic.rating)].index
ratings_t = ratings.loc[ratings.critic == movie_ratings.columns[user_id]]
ratings_t = ratings_t.reset_index(drop=True)
x = sim_items.loc[:, titles_na_critic]
ratings_t = pd.merge(ratings_t, x, on='title')
print(ratings_t)
result_dict = {'title': list(), 'rating': list(), 'similarity': list()}
for row in ratings_t.iterrows():
for title in titles_na_critic:
result_dict['title'].append(title)
result_dict['rating'].append(row[1]['rating'])
result_dict['similarity'].append(row[1][title])
result = pd.DataFrame(result_dict)
result.loc[:, 'sim_rating'] = result.rating * result.similarity
result = result.groupby('title').sum()
result.loc[:, 'prediction'] = result.sim_rating / result.similarity
result = result.drop(columns=['rating', 'similarity', 'sim_rating'])
print(result)
| true
| true
|
1c48031ea57f3f19e9314a5f6ec8871aefc6ec8a
| 3,084
|
py
|
Python
|
mainscenemaker-2015/sku_enc.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
mainscenemaker-2015/sku_enc.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
mainscenemaker-2015/sku_enc.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
import os, struct, json, zlib, shutil
print("SKUSCENE ENCRYPTOR BY: JACKLSUMMER15")
# input.json: a JSON array whose first element carries the song list plus
# SKU/territory metadata (keys "songs", "sku", "territory" used below).
with open("input.json") as f:
    sku=json.load(f)
mapnames=sku[0]["songs"]
map_count=0
# Ensure the output directory exists. makedirs(exist_ok=True) replaces the
# bare try/except-pass, which silently swallowed every error (including
# permission problems) rather than only the directory-already-exists case.
os.makedirs('output', exist_ok=True)
skuenc=open("output/skuscene_maps_pc_all.isc.ckd","wb")
# Resource-name constants; currently unused below (the same strings are
# baked into the precomputed header blob that follows).
skudb="skuscene_db"
skubasetpl="skuscene_base.tpl"
skubasepath="world/skuscenes/"
# Fixed file header — presumably magic/version words; TODO confirm against
# the UbiArt .isc.ckd format.
skuenc.write(b'\x00\x00\x00\x01\x00\x02\x6C\xD2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# Big-endian entry count: 1 base entry + one entry per song.
skuenc.write(struct.pack(">I",1+len(mapnames)))
# skuscene base & skuscene db: precomputed entry embedding "skuscene_db",
# "skuscene_base.tpl" and "world/skuscenes/" with their trailing checksums.
skuenc.write(b'\x97\xCA\x62\x8B\x00\x00\x00\x00\x3F\x80\x00\x00\x3F\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0B\x73\x6B\x75\x73\x63\x65\x6E\x65\x5F\x64\x62\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x11\x73\x6B\x75\x73\x63\x65\x6E\x65\x5F\x62\x61\x73\x65\x2E\x74\x70\x6C\x00\x00\x00\x10\x77\x6F\x72\x6C\x64\x2F\x73\x6B\x75\x73\x63\x65\x6E\x65\x73\x2F\x0C\x1C\x9B\x77\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x40\x55\x79\xFB')
# One entry per song: each string field is written as a big-endian length
# prefix followed by the raw bytes; paths additionally get a little-endian
# CRC32 of the path bytes appended.
for i, mapname in enumerate(mapnames,1):
    mapnamelow=mapname.lower()
    print("adding "+mapname+"...")
    songdesctpl="songdesc.tpl"
    songdescpath="world/jd2015/"+mapnamelow+"/"
    skuenc.write(b'\x97\xCA\x62\x8B\x00\x00\x00\x00\x3F\x80\x00\x00\x3F\x80\x00\x00\x00\x00\x00\x00')
    skuenc.write(struct.pack(">I",len(mapname))+mapname.encode())
    skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00')
    skuenc.write(struct.pack(">I",len(songdesctpl))+songdesctpl.encode())
    skuenc.write(struct.pack(">I",len(songdescpath))+songdescpath.encode()+struct.pack("<I",zlib.crc32(songdescpath.encode())))
    skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xE0\x7F\xCC\x3F')
    map_count+=1
# Section terminator after the song entries.
skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00')
# SKU/territory record followed by embedded ESRB rating scene path bytes
# ("rating_esrb_169.isc" / "world/jd2015/_ui/screens/bootsequence/rating/").
skuenc.write(b'\x00\x00\x00\x01\xF8\x78\xDC\x2D'+struct.pack(">I",len(sku[0]["sku"]))+sku[0]["sku"].encode()+struct.pack(">I",len(sku[0]["territory"]))+sku[0]["territory"].encode()+b'\x00\x00\x00\x13\x72\x61\x74\x69\x6E\x67\x5F\x65\x73\x72\x62\x5F\x31\x36\x39\x2E\x69\x73\x63\x00\x00\x00\x2D\x77\x6F\x72\x6C\x64\x2F\x6A\x64\x32\x30\x31\x35\x2F\x5F\x75\x69\x2F\x73\x63\x72\x65\x65\x6E\x73\x2F\x62\x6F\x6F\x74\x73\x65\x71\x75\x65\x6E\x63\x65\x2F\x72\x61\x74\x69\x6E\x67\x2F\xF8\x41\x4C\x62\x00\x00\x00\x00')
# Cover-art section: again one entry per song, same length-prefix + CRC32
# layout as above.
skuenc.write(struct.pack(">I",len(mapnames)))
for i, mapname in enumerate(mapnames,1):
    mapnamelow=mapname.lower()
    covergenericact=mapnamelow+"_cover_generic.act"
    menuartpath="world/jd2015/"+mapnamelow+"/menuart/actors/"
    skuenc.write(struct.pack(">I",len(mapname))+mapname.encode())
    skuenc.write(struct.pack(">I",len(covergenericact))+covergenericact.encode())
    skuenc.write(struct.pack(">I",len(menuartpath))+menuartpath.encode()+struct.pack("<I",zlib.crc32(menuartpath.encode())))
    skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00')
    map_count+=1
# Final terminator, then close the container file.
skuenc.write(b'\x00\x00\x00\x00')
skuenc.close()
| 39.538462
| 514
| 0.703956
|
import os, struct, json, zlib, shutil
print("SKUSCENE ENCRYPTOR BY: JACKLSUMMER15")
with open("input.json") as f:
sku=json.load(f)
mapnames=sku[0]["songs"]
map_count=0
try:
os.mkdir('output')
except:
pass
skuenc=open("output/skuscene_maps_pc_all.isc.ckd","wb")
skudb="skuscene_db"
skubasetpl="skuscene_base.tpl"
skubasepath="world/skuscenes/"
skuenc.write(b'\x00\x00\x00\x01\x00\x02\x6C\xD2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
skuenc.write(struct.pack(">I",1+len(mapnames)))
skuenc.write(b'\x97\xCA\x62\x8B\x00\x00\x00\x00\x3F\x80\x00\x00\x3F\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0B\x73\x6B\x75\x73\x63\x65\x6E\x65\x5F\x64\x62\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x11\x73\x6B\x75\x73\x63\x65\x6E\x65\x5F\x62\x61\x73\x65\x2E\x74\x70\x6C\x00\x00\x00\x10\x77\x6F\x72\x6C\x64\x2F\x73\x6B\x75\x73\x63\x65\x6E\x65\x73\x2F\x0C\x1C\x9B\x77\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x40\x55\x79\xFB')
for i, mapname in enumerate(mapnames,1):
mapnamelow=mapname.lower()
print("adding "+mapname+"...")
songdesctpl="songdesc.tpl"
songdescpath="world/jd2015/"+mapnamelow+"/"
skuenc.write(b'\x97\xCA\x62\x8B\x00\x00\x00\x00\x3F\x80\x00\x00\x3F\x80\x00\x00\x00\x00\x00\x00')
skuenc.write(struct.pack(">I",len(mapname))+mapname.encode())
skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00')
skuenc.write(struct.pack(">I",len(songdesctpl))+songdesctpl.encode())
skuenc.write(struct.pack(">I",len(songdescpath))+songdescpath.encode()+struct.pack("<I",zlib.crc32(songdescpath.encode())))
skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xE0\x7F\xCC\x3F')
map_count+=1
skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00')
skuenc.write(b'\x00\x00\x00\x01\xF8\x78\xDC\x2D'+struct.pack(">I",len(sku[0]["sku"]))+sku[0]["sku"].encode()+struct.pack(">I",len(sku[0]["territory"]))+sku[0]["territory"].encode()+b'\x00\x00\x00\x13\x72\x61\x74\x69\x6E\x67\x5F\x65\x73\x72\x62\x5F\x31\x36\x39\x2E\x69\x73\x63\x00\x00\x00\x2D\x77\x6F\x72\x6C\x64\x2F\x6A\x64\x32\x30\x31\x35\x2F\x5F\x75\x69\x2F\x73\x63\x72\x65\x65\x6E\x73\x2F\x62\x6F\x6F\x74\x73\x65\x71\x75\x65\x6E\x63\x65\x2F\x72\x61\x74\x69\x6E\x67\x2F\xF8\x41\x4C\x62\x00\x00\x00\x00')
skuenc.write(struct.pack(">I",len(mapnames)))
for i, mapname in enumerate(mapnames,1):
mapnamelow=mapname.lower()
covergenericact=mapnamelow+"_cover_generic.act"
menuartpath="world/jd2015/"+mapnamelow+"/menuart/actors/"
skuenc.write(struct.pack(">I",len(mapname))+mapname.encode())
skuenc.write(struct.pack(">I",len(covergenericact))+covergenericact.encode())
skuenc.write(struct.pack(">I",len(menuartpath))+menuartpath.encode()+struct.pack("<I",zlib.crc32(menuartpath.encode())))
skuenc.write(b'\x00\x00\x00\x00\x00\x00\x00\x00')
map_count+=1
skuenc.write(b'\x00\x00\x00\x00')
skuenc.close()
| true
| true
|
1c48041a87f8e551a67a3f06553241f52dcb0066
| 32
|
py
|
Python
|
venv/Lib/site-packages/pdoc/test/example_pkg/_relative_import/__init__.py
|
StavromularBeta/Rover
|
3030f1521e5a6bc2c6722983ca59a008b3a11400
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/pdoc/test/example_pkg/_relative_import/__init__.py
|
StavromularBeta/Rover
|
3030f1521e5a6bc2c6722983ca59a008b3a11400
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/pdoc/test/example_pkg/_relative_import/__init__.py
|
StavromularBeta/Rover
|
3030f1521e5a6bc2c6722983ca59a008b3a11400
|
[
"MIT"
] | 1
|
2021-02-22T13:55:32.000Z
|
2021-02-22T13:55:32.000Z
|
from . import foo # noqa: F401
| 16
| 31
| 0.65625
|
from . import foo
| true
| true
|
1c4804828254777e5f84787506bd650e8ff713f5
| 785
|
py
|
Python
|
invenio_pure_sync/tests/run_process.py
|
utnapischtim/pure_sync_rdm
|
9d465d4f1a2410ba83d09ca691c1655e7daaf113
|
[
"MIT"
] | null | null | null |
invenio_pure_sync/tests/run_process.py
|
utnapischtim/pure_sync_rdm
|
9d465d4f1a2410ba83d09ca691c1655e7daaf113
|
[
"MIT"
] | null | null | null |
invenio_pure_sync/tests/run_process.py
|
utnapischtim/pure_sync_rdm
|
9d465d4f1a2410ba83d09ca691c1655e7daaf113
|
[
"MIT"
] | null | null | null |
import os
# Developer-local driver: absolute path to the project's virtualenv Python
# plus the CLI entry point. Machine-specific — adjust before running.
command = '/home/bootcamp/.local/share/virtualenvs/pure_sync_rdm-fOqjLk38/bin/python /home/bootcamp/src/pure_sync_rdm/invenio_pure_sync/cli.py '
# The commented lines below are alternative CLI subcommands kept for manual
# one-off runs; exactly one is enabled at a time.
# os.system(command + 'pages --pageStart=1 --pageEnd=2 --pageSize=1')
os.system(command + 'pages --pageStart=7 --pageEnd=8 --pageSize=2')
# os.system(command + "group_split --oldGroup=2267 --newGroups='13866 19428'")
# os.system(command + "group_merge --oldGroups='13866 19428' --newGroup=2267")
# os.system(command + 'logs')
# os.system(command + 'owners_list')
# os.system(command + "owner --identifier='externalId'")
# os.system(command + 'changes')
# os.system(command + 'pure_import')
# os.system(command + 'delete')
# os.system(command + 'uuid')
# os.system(command + 'duplicates')
# os.system(command + 'owner_orcid')
| 41.315789
| 144
| 0.713376
|
import os
command = '/home/bootcamp/.local/share/virtualenvs/pure_sync_rdm-fOqjLk38/bin/python /home/bootcamp/src/pure_sync_rdm/invenio_pure_sync/cli.py '
os.system(command + 'pages --pageStart=7 --pageEnd=8 --pageSize=2')
| true
| true
|
1c48057c53b2997b5f312ffc1cc0ad73366dcfdd
| 8,528
|
py
|
Python
|
spira/lpe/structure.py
|
cloudcalvin/spira
|
2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c
|
[
"MIT"
] | null | null | null |
spira/lpe/structure.py
|
cloudcalvin/spira
|
2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c
|
[
"MIT"
] | 1
|
2021-10-17T10:18:04.000Z
|
2021-10-17T10:18:04.000Z
|
spira/lpe/structure.py
|
cloudcalvin/spira
|
2dcaef188f2bc8c3839e1b5ff0be027e0cd4908c
|
[
"MIT"
] | null | null | null |
import spira
from spira import param
from spira import shapes
from spira.lpe.layers import *
from spira.lrc.rules import *
from spira.lrc.checking import Rules
from spira.lpe.containers import __CellContainer__
from spira.lne.graph import Graph
from spira.lne.mesh import Mesh
from spira.lne.geometry import Geometry
RDD = spira.get_rule_deck()
class ComposeMLayers(__CellContainer__):
    """
    Wraps every elemental whose layer purpose is METAL in MLayer cells
    (grouped per physical layer inside a CMLayers container) and adds the
    containers as elementals of this cell.
    """
    cell_elems = param.ElementListField()
    mlayers = param.DataField(fdef_name='create_mlayers')
    def _merge_layers(self, flat_metals):
        """Union overlapping metal polygons and return them as an ElementList."""
        points = []
        elems = spira.ElementList()
        for p in flat_metals:
            for pp in p.polygons:
                points.append(pp)
        if points:
            from spira.gdsii.utils import scale_polygon_down as spd
            points = spd(points)
            shape = shapes.Shape(points=points)
            # NOTE(review): apply_merge appears to be a side-effecting property
            # (the bare attribute access performs the merge) — confirm.
            shape.apply_merge
            for pts in shape.points:
                pts = spd([pts])
                elems += spira.Polygons(shape=pts)
        return elems
    def create_mlayers(self):
        """Build one CMLayers container per METAL physical layer present."""
        elems = spira.ElementList()
        # players = RDD.PLAYER.get_physical_layers(purpose_symbol=['METAL', 'GROUND', 'MOAT'])
        flat_elems = self.cell_elems.flat_copy()
        for pl in RDD.PLAYER.get_physical_layers(purposes='METAL'):
            metal_elems = flat_elems.get_polygons(layer=pl.layer)
            if metal_elems:
                c_mlayer = CMLayers(layer=pl.layer)
                # One MLayer per merged polygon; the name encodes layer number,
                # owning cell name/id and the polygon index for uniqueness.
                for i, ply in enumerate(self._merge_layers(metal_elems)):
                    ml = MLayer(name='MLayer_{}_{}_{}_{}'.format(pl.layer.number,
                                self.cell.name,
                                self.cell.id, i),
                                points=ply.polygons,
                                number=pl.layer.number)
                    c_mlayer += spira.SRef(ml)
                elems += spira.SRef(c_mlayer)
        return elems
    def create_elementals(self, elems):
        # TODO: Apply DRC checking between metals, before being placed.
        for lcell in self.mlayers:
            elems += lcell
        # FIXME: Allow this operation.
        # elems += self.mlayers
        return elems
class ComposeNLayer(ComposeMLayers):
    """
    Wraps every elemental whose layer purpose is VIA in NLayer cells
    (grouped per physical layer inside a CNLayers container) and adds the
    containers as elementals of this cell.
    """
    cell_elems = param.ElementListField()
    # level 1 marks a Device cell; via layers are only attached at that level.
    level = param.IntegerField(default=1)
    nlayers = param.DataField(fdef_name='create_nlayers')
    def create_nlayers(self):
        """Build one CNLayers container per VIA physical layer present."""
        elems = ElementList()
        flat_elems = self.cell_elems.flat_copy()
        for pl in RDD.PLAYER.get_physical_layers(purposes='VIA'):
            via_elems = flat_elems.get_polygons(layer=pl.layer)
            if via_elems:
                c_nlayer = CNLayers(layer=pl.layer)
                for i, ply in enumerate(via_elems):
                    ml = NLayer(name='Via_NLayer_{}_{}_{}'.format(pl.layer.number, self.cell.name, i),
                                points=ply.polygons,
                                midpoint=ply.center,
                                number=pl.layer.number)
                    c_nlayer += spira.SRef(ml)
                elems += SRef(c_nlayer)
        return elems
    def create_elementals(self, elems):
        super().create_elementals(elems)
        # Only add it if its a Device.
        if self.level == 1:
            for lcell in self.nlayers:
                elems += lcell
        return elems
class ComposeGLayer(ComposeNLayer):
    """Merges plane elementals into a single ground layer clipped to the
    cell's bounding box and adds it (wrapped in CGLayers) to the cell."""
    plane_elems = param.ElementListField() # Elementals like skyplanes and groundplanes.
    ground_layer = param.DataField(fdef_name='create_merged_ground_layers')
    def create_merged_ground_layers(self):
        """Union all plane polygons onto the GDSII ground layer, or None."""
        points = []
        for p in self.plane_elems.flat_copy():
            for pp in p.polygons:
                points.append(pp)
        if points:
            ll = Layer(number=RDD.GDSII.GPLAYER, datatype=6)
            merged_ply = UnionPolygons(polygons=points, gdslayer=ll)
            return merged_ply
        return None
    def create_elementals(self, elems):
        super().create_elementals(elems)
        # Ground is only attached at Device level (level 1).
        if self.level == 1:
            if self.ground_layer:
                box = self.cell.bbox
                # box.move(midpoint=box.center, destination=(0,0))
                # Clip (boolean OR per spira semantics) the merged ground
                # against the cell bounding box.
                gnd = self.ground_layer | box
                if gnd:
                    c_glayer = CGLayers(layer=gnd.gdslayer)
                    name = 'GLayer_{}_{}'.format(self.cell.name, gnd.gdslayer.number)
                    gnd_layer = GLayer(name=name, layer=gnd.gdslayer, player=gnd)
                    c_glayer += spira.SRef(gnd_layer)
                    elems += spira.SRef(c_glayer)
        return elems
class ConnectDesignRules(ComposeGLayer):
    """Evaluate the rule deck's design rules against the composed
    elementals; the elemental list itself is passed through unchanged."""
    metal_elems = param.ElementListField()
    def create_elementals(self, elems):
        """Run each rule; for failing rules, collect the layer cells whose
        layer number matches the rule's first layer."""
        super().create_elementals(elems)
        # NOTE(review): incorrect_elems is never populated and correct_elems
        # is collected but not used or returned — this looks like unfinished
        # DRC bookkeeping; confirm intent before relying on it.
        incorrect_elems = ElementList()
        correct_elems = ElementList()
        for rule in RDD.RULES.elementals:
            if not rule.apply(elems):
                for composed_lcell in elems:
                    for lcell in composed_lcell.ref.elementals.sref:
                        if lcell.ref.layer.number == rule.layer1.number:
                            correct_elems += lcell
        return elems
class __StructureCell__(ConnectDesignRules):
    """
    Add a GROUND bbox to Device for primitive and
    DRC detection, since GROUND is only in Mask Cell.
    """
    level = param.IntegerField(default=1)
    device_elems = param.ElementListField()
    # Lazily computed device bounding-box layer and terminal layers.
    devices = param.DataField(fdef_name='create_device_layers')
    terminals = param.DataField(fdef_name='create_terminal_layers')
    def create_device_layers(self):
        """Wrap the cell's bbox (re-centred at origin) in a DLayer and
        return an SRef moved back to the cell's original centre."""
        box = self.cell.bbox
        box.move(midpoint=box.center, destination=(0,0))
        B = DLayer(blayer=box, device_elems=self.cell.elementals)
        Bs = SRef(B)
        Bs.move(midpoint=(0,0), destination=self.cell.bbox.center)
        return Bs
    def create_terminal_layers(self):
        """Build a TLayer SRef for every spira.Term port on the cell."""
        # flat_elems = self.cell_elems.flat_copy()
        # port_elems = flat_elems.get_polygons(layer=RDD.PURPOSE.TERM)
        # label_elems = flat_elems.labels
        #
        # elems = ElementList()
        # for port in port_elems:
        #     for label in label_elems:
        #
        #         lbls = label.text.split(' ')
        #         s_p1, s_p2 = lbls[1], lbls[2]
        #         p1, p2 = None, None
        #
        #         if s_p1 in RDD.METALS.keys:
        #             layer = RDD.METALS[s_p1].LAYER
        #             p1 = spira.Layer(name=lbls[0], number=layer, datatype=RDD.GDSII.TEXT)
        #
        #         if s_p2 in RDD.METALS.keys:
        #             layer = RDD.METALS[s_p2].LAYER
        #             p2 = spira.Layer(name=lbls[0], number=layer, datatype=RDD.GDSII.TEXT)
        #
        #         if p1 and p2:
        #             if label.point_inside(polygon=port.polygons[0]):
        #                 term = TLayer(points=port.polygons,
        #                     layer1=p1,
        #                     layer2=p2,
        #                     number=RDD.GDSII.TERM,
        #                     midpoint=label.position)
        #
        #                 term.ports[0].name = 'P1_{}'.format(label.text)
        #                 term.ports[1].name = 'P2_{}'.format(label.text)
        #
        #                 elems += SRef(term)
        elems = ElementList()
        for p in self.cell.ports:
            if isinstance(p, spira.Term):
                term = TLayer(points=p.polygon.polygons,
                    # layer1=p1,
                    # layer2=p2,
                    number=RDD.PURPOSE.TERM.datatype,
                    midpoint=p.label.position)
                # NOTE(review): port names use constants 1/2 rather than the
                # label text (cf. the commented-out variant above) — confirm.
                term.ports[0].name = 'P1_{}'.format(1)
                term.ports[1].name = 'P2_{}'.format(2)
                elems += SRef(term)
        return elems
    def create_elementals(self, elems):
        """Pass through inherited elementals; device/terminal layers are
        currently disabled (see commented lines)."""
        super().create_elementals(elems)
        # elems += self.devices
        # for term in self.terminals:
        #     elems += term
        return elems
    def create_ports(self, ports):
        # Port forwarding from cell terms is currently disabled.
        # for t in self.cell.terms:
        #     ports += t
        return ports
| 30.787004
| 102
| 0.553471
|
import spira
from spira import param
from spira import shapes
from spira.lpe.layers import *
from spira.lrc.rules import *
from spira.lrc.checking import Rules
from spira.lpe.containers import __CellContainer__
from spira.lne.graph import Graph
from spira.lne.mesh import Mesh
from spira.lne.geometry import Geometry
RDD = spira.get_rule_deck()
class ComposeMLayers(__CellContainer__):
    """Collect all polygons on METAL-purpose physical layers, merge the
    overlapping ones, and add them as MLayer cells (grouped per layer in a
    CMLayers container) to the composed cell."""
    # Source elementals; flattened and scanned for metal polygons.
    cell_elems = param.ElementListField()
    # Lazily evaluated metal-layer references (bound to create_mlayers).
    mlayers = param.DataField(fdef_name='create_mlayers')
    def _merge_layers(self, flat_metals):
        """Union overlapping polygons of one metal layer; returns an
        ElementList of merged spira.Polygons."""
        points = []
        elems = spira.ElementList()
        for p in flat_metals:
            for pp in p.polygons:
                points.append(pp)
        if points:
            from spira.gdsii.utils import scale_polygon_down as spd
            # Scale down before merging, then back per merged shape, to
            # keep coordinates in the unit range the merge expects.
            points = spd(points)
            shape = shapes.Shape(points=points)
            # NOTE(review): bare attribute access — presumably a property
            # that merges in place; confirm in spira.shapes.Shape.
            shape.apply_merge
            for pts in shape.points:
                pts = spd([pts])
                elems += spira.Polygons(shape=pts)
        return elems
    def create_mlayers(self):
        """Return an ElementList with one CMLayers SRef per METAL physical
        layer that has polygons in the flattened cell_elems."""
        elems = spira.ElementList()
        flat_elems = self.cell_elems.flat_copy()
        for pl in RDD.PLAYER.get_physical_layers(purposes='METAL'):
            metal_elems = flat_elems.get_polygons(layer=pl.layer)
            if metal_elems:
                c_mlayer = CMLayers(layer=pl.layer)
                for i, ply in enumerate(self._merge_layers(metal_elems)):
                    ml = MLayer(name='MLayer_{}_{}_{}_{}'.format(pl.layer.number,
                                                                 self.cell.name,
                                                                 self.cell.id, i),
                                points=ply.polygons,
                                number=pl.layer.number)
                    c_mlayer += spira.SRef(ml)
                elems += spira.SRef(c_mlayer)
        return elems
    def create_elementals(self, elems):
        """Add every composed metal-layer container to the elementals."""
        for lcell in self.mlayers:
            elems += lcell
        return elems
class ComposeNLayer(ComposeMLayers):
    """Wrap every flattened polygon on a VIA-purpose physical layer in an
    NLayer cell and add the per-layer CNLayers containers as elementals."""
    cell_elems = param.ElementListField()
    # Via layers are only attached at hierarchy level 1.
    level = param.IntegerField(default=1)
    nlayers = param.DataField(fdef_name='create_nlayers')
    def create_nlayers(self):
        """Return an ElementList with one CNLayers SRef per VIA physical
        layer that has polygons in the flattened cell_elems."""
        elems = ElementList()
        flat_elems = self.cell_elems.flat_copy()
        for pl in RDD.PLAYER.get_physical_layers(purposes='VIA'):
            via_elems = flat_elems.get_polygons(layer=pl.layer)
            if via_elems:
                c_nlayer = CNLayers(layer=pl.layer)
                for i, ply in enumerate(via_elems):
                    ml = NLayer(name='Via_NLayer_{}_{}_{}'.format(pl.layer.number, self.cell.name, i),
                                points=ply.polygons,
                                midpoint=ply.center,
                                number=pl.layer.number)
                    c_nlayer += spira.SRef(ml)
                elems += SRef(c_nlayer)
        return elems
    def create_elementals(self, elems):
        """Extend the inherited (metal) elementals with via layer cells."""
        super().create_elementals(elems)
        if self.level == 1:
            for lcell in self.nlayers:
                elems += lcell
        return elems
class ComposeGLayer(ComposeNLayer):
    """Merge ground/sky plane polygons into one ground layer and attach it,
    clipped to the cell bounding box, as a CGLayers elemental."""
    # Plane elementals (e.g. skyplanes, groundplanes) to be merged.
    plane_elems = param.ElementListField()
    ground_layer = param.DataField(fdef_name='create_merged_ground_layers')
    def create_merged_ground_layers(self):
        """Union all plane polygons onto the GDSII ground layer; returns
        the merged polygon object, or None when there are no planes."""
        points = []
        for p in self.plane_elems.flat_copy():
            for pp in p.polygons:
                points.append(pp)
        if points:
            ll = Layer(number=RDD.GDSII.GPLAYER, datatype=6)
            merged_ply = UnionPolygons(polygons=points, gdslayer=ll)
            return merged_ply
        return None
    def create_elementals(self, elems):
        """Add the ground layer (intersected with the cell bbox) at level 1."""
        super().create_elementals(elems)
        if self.level == 1:
            if self.ground_layer:
                box = self.cell.bbox
                # '|' intersects the merged ground plane with the bbox.
                gnd = self.ground_layer | box
                if gnd:
                    c_glayer = CGLayers(layer=gnd.gdslayer)
                    name = 'GLayer_{}_{}'.format(self.cell.name, gnd.gdslayer.number)
                    gnd_layer = GLayer(name=name, layer=gnd.gdslayer, player=gnd)
                    c_glayer += spira.SRef(gnd_layer)
                    elems += spira.SRef(c_glayer)
        return elems
class ConnectDesignRules(ComposeGLayer):
    """Evaluate the rule deck's design rules over the composed elementals;
    the elemental list is returned unchanged."""
    metal_elems = param.ElementListField()
    def create_elementals(self, elems):
        """Run each rule; for failing rules, collect the layer cells whose
        layer number matches the rule's first layer."""
        super().create_elementals(elems)
        # NOTE(review): incorrect_elems is never populated and correct_elems
        # is not used afterwards — apparently unfinished DRC bookkeeping.
        incorrect_elems = ElementList()
        correct_elems = ElementList()
        for rule in RDD.RULES.elementals:
            if not rule.apply(elems):
                for composed_lcell in elems:
                    for lcell in composed_lcell.ref.elementals.sref:
                        if lcell.ref.layer.number == rule.layer1.number:
                            correct_elems += lcell
        return elems
class __StructureCell__(ConnectDesignRules):
    """Top of the composition chain: exposes device bounding-box and
    terminal layers (currently not added to the elementals)."""
    level = param.IntegerField(default=1)
    device_elems = param.ElementListField()
    devices = param.DataField(fdef_name='create_device_layers')
    terminals = param.DataField(fdef_name='create_terminal_layers')
    def create_device_layers(self):
        """Wrap the cell's bbox (re-centred at origin) in a DLayer and
        return an SRef moved back to the cell's original centre."""
        box = self.cell.bbox
        box.move(midpoint=box.center, destination=(0,0))
        B = DLayer(blayer=box, device_elems=self.cell.elementals)
        Bs = SRef(B)
        Bs.move(midpoint=(0,0), destination=self.cell.bbox.center)
        return Bs
    def create_terminal_layers(self):
        """Build a TLayer SRef for every spira.Term port on the cell."""
        elems = ElementList()
        for p in self.cell.ports:
            if isinstance(p, spira.Term):
                term = TLayer(points=p.polygon.polygons,
                              number=RDD.PURPOSE.TERM.datatype,
                              midpoint=p.label.position)
                term.ports[0].name = 'P1_{}'.format(1)
                term.ports[1].name = 'P2_{}'.format(2)
                elems += SRef(term)
        return elems
    def create_elementals(self, elems):
        """Pass through inherited elementals unchanged."""
        super().create_elementals(elems)
        return elems
    def create_ports(self, ports):
        # Pass-through; terminal ports are not forwarded here.
        return ports
| true
| true
|
1c48059513bd64cd768042db01ce8f2c7d15dfeb
| 1,009
|
py
|
Python
|
hooks/post_gen_project.py
|
christophedcpm/cookiecutter-pypackage
|
eaad44b1ae7c049e9e2a868b3b15164cceaf55e2
|
[
"BSD-3-Clause"
] | null | null | null |
hooks/post_gen_project.py
|
christophedcpm/cookiecutter-pypackage
|
eaad44b1ae7c049e9e2a868b3b15164cceaf55e2
|
[
"BSD-3-Clause"
] | null | null | null |
hooks/post_gen_project.py
|
christophedcpm/cookiecutter-pypackage
|
eaad44b1ae7c049e9e2a868b3b15164cceaf55e2
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import os
PROJECT_DIRECTORY = os.path.realpath(os.path.curdir)
def remove_file(filepath):
    """Delete *filepath*, resolved relative to the generated project root."""
    target = os.path.join(PROJECT_DIRECTORY, filepath)
    os.remove(target)
def remove_dir(filepath):
    """Remove the empty directory *filepath* under the generated project
    root; os.removedirs also prunes any now-empty parent directories."""
    target = os.path.join(PROJECT_DIRECTORY, filepath)
    os.removedirs(target)
if __name__ == "__main__":
    # Post-generation hook: prune files that the chosen cookiecutter
    # options make unnecessary. The "{{ ... }}" literals are rendered by
    # cookiecutter *before* this script runs, so each condition compares
    # plain strings at execution time.
    if "{{ cookiecutter.create_author_file }}" != "y":
        remove_file("AUTHORS.rst")
        remove_file("docs/authors.rst")
    if "{{ cookiecutter.ci_provider }}" == "Github Action":
        # Travis config is redundant when GitHub Actions is the CI.
        remove_file(".travis.yml")
        if '{{cookiecutter.enable_automatic_pypi_deployment }}' == 'n':
            remove_file('.github/workflows/publish.yml')
    if "{{ cookiecutter.ci_provider }}" == "Travis-CI":
        # NOTE(review): os.removedirs only removes *empty* directories —
        # this assumes .github contains no remaining files; confirm.
        remove_dir(".github")
    if "no" in "{{ cookiecutter.command_line_interface|lower }}":
        cli_file = os.path.join("{{ cookiecutter.project_slug }}", "cli.py")
        remove_file(cli_file)
    if "Not open source" == "{{ cookiecutter.open_source_license }}":
        remove_file("LICENSE")
| 28.828571
| 76
| 0.654113
|
import os
PROJECT_DIRECTORY = os.path.realpath(os.path.curdir)
def remove_file(filepath):
    """Delete *filepath*, resolved relative to the generated project root."""
    os.remove(os.path.join(PROJECT_DIRECTORY, filepath))
def remove_dir(filepath):
    """Remove the empty directory *filepath* under the project root;
    os.removedirs also prunes any now-empty parent directories."""
    os.removedirs(os.path.join(PROJECT_DIRECTORY, filepath))
if __name__ == "__main__":
    # Cookiecutter post-generation hook: the "{{ ... }}" literals were
    # rendered before this runs, so these are plain string comparisons.
    if "{{ cookiecutter.create_author_file }}" != "y":
        remove_file("AUTHORS.rst")
        remove_file("docs/authors.rst")
    if "{{ cookiecutter.ci_provider }}" == "Github Action":
        remove_file(".travis.yml")
        if '{{cookiecutter.enable_automatic_pypi_deployment }}' == 'n':
            remove_file('.github/workflows/publish.yml')
    if "{{ cookiecutter.ci_provider }}" == "Travis-CI":
        # NOTE(review): os.removedirs requires .github to be empty — confirm.
        remove_dir(".github")
    if "no" in "{{ cookiecutter.command_line_interface|lower }}":
        cli_file = os.path.join("{{ cookiecutter.project_slug }}", "cli.py")
        remove_file(cli_file)
    if "Not open source" == "{{ cookiecutter.open_source_license }}":
        remove_file("LICENSE")
| true
| true
|
1c4807c8c7b1516bf451ce708684235b30b64ee7
| 2,410
|
py
|
Python
|
mayan/apps/linking/links.py
|
garrans/mayan-edms
|
e95e90cc47447a1ae72629271652824aa9868572
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/linking/links.py
|
garrans/mayan-edms
|
e95e90cc47447a1ae72629271652824aa9868572
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/linking/links.py
|
garrans/mayan-edms
|
e95e90cc47447a1ae72629271652824aa9868572
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from documents.permissions import permission_document_view
from navigation import Link
from .permissions import (
permission_smart_link_create, permission_smart_link_delete,
permission_smart_link_edit, permission_smart_link_view
)
# Navigation Link declarations for the smart-link (linking) app. Each Link
# names the permissions required to show it, its label, the target view,
# and the attribute path(s) resolved from the navigation context as args.

# Smart-link condition management.
link_smart_link_condition_create = Link(
    permissions=(permission_smart_link_edit,), text=_('Create condition'),
    view='linking:smart_link_condition_create', args='object.pk'
)
link_smart_link_condition_delete = Link(
    permissions=(permission_smart_link_edit,), tags='dangerous',
    text=_('Delete'), view='linking:smart_link_condition_delete',
    args='resolved_object.pk'
)
link_smart_link_condition_edit = Link(
    permissions=(permission_smart_link_edit,), text=_('Edit'),
    view='linking:smart_link_condition_edit', args='resolved_object.pk'
)
link_smart_link_condition_list = Link(
    permissions=(permission_smart_link_edit,), text=_('Conditions'),
    view='linking:smart_link_condition_list', args='object.pk'
)
# Smart-link CRUD.
link_smart_link_create = Link(
    permissions=(permission_smart_link_create,),
    text=_('Create new smart link'), view='linking:smart_link_create'
)
link_smart_link_delete = Link(
    permissions=(permission_smart_link_delete,), tags='dangerous',
    text=_('Delete'), view='linking:smart_link_delete', args='object.pk'
)
link_smart_link_document_types = Link(
    permissions=(permission_smart_link_edit,), text=_('Document types'),
    view='linking:smart_link_document_types', args='object.pk'
)
link_smart_link_edit = Link(
    permissions=(permission_smart_link_edit,), text=_('Edit'),
    view='linking:smart_link_edit', args='object.pk'
)
# Per-document smart-link views (note document-view permission here).
link_smart_link_instance_view = Link(
    permissions=(permission_smart_link_view,), text=_('Documents'),
    view='linking:smart_link_instance_view', args=(
        'document.pk', 'object.pk',
    )
)
link_smart_link_instances_for_document = Link(
    permissions=(permission_document_view,), text=_('Smart links'),
    view='linking:smart_link_instances_for_document', args='object.pk'
)
link_smart_link_list = Link(
    permissions=(permission_smart_link_create,), text=_('Smart links'),
    view='linking:smart_link_list'
)
link_smart_link_setup = Link(
    icon='fa fa-link', permissions=(permission_smart_link_create,),
    text=_('Smart links'), view='linking:smart_link_list'
)
| 37.65625
| 74
| 0.772199
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from documents.permissions import permission_document_view
from navigation import Link
from .permissions import (
permission_smart_link_create, permission_smart_link_delete,
permission_smart_link_edit, permission_smart_link_view
)
# Navigation Link declarations for the smart-link (linking) app: required
# permissions, label, target view name, and context attribute path(s).
link_smart_link_condition_create = Link(
    permissions=(permission_smart_link_edit,), text=_('Create condition'),
    view='linking:smart_link_condition_create', args='object.pk'
)
link_smart_link_condition_delete = Link(
    permissions=(permission_smart_link_edit,), tags='dangerous',
    text=_('Delete'), view='linking:smart_link_condition_delete',
    args='resolved_object.pk'
)
link_smart_link_condition_edit = Link(
    permissions=(permission_smart_link_edit,), text=_('Edit'),
    view='linking:smart_link_condition_edit', args='resolved_object.pk'
)
link_smart_link_condition_list = Link(
    permissions=(permission_smart_link_edit,), text=_('Conditions'),
    view='linking:smart_link_condition_list', args='object.pk'
)
link_smart_link_create = Link(
    permissions=(permission_smart_link_create,),
    text=_('Create new smart link'), view='linking:smart_link_create'
)
link_smart_link_delete = Link(
    permissions=(permission_smart_link_delete,), tags='dangerous',
    text=_('Delete'), view='linking:smart_link_delete', args='object.pk'
)
link_smart_link_document_types = Link(
    permissions=(permission_smart_link_edit,), text=_('Document types'),
    view='linking:smart_link_document_types', args='object.pk'
)
link_smart_link_edit = Link(
    permissions=(permission_smart_link_edit,), text=_('Edit'),
    view='linking:smart_link_edit', args='object.pk'
)
link_smart_link_instance_view = Link(
    permissions=(permission_smart_link_view,), text=_('Documents'),
    view='linking:smart_link_instance_view', args=(
        'document.pk', 'object.pk',
    )
)
link_smart_link_instances_for_document = Link(
    permissions=(permission_document_view,), text=_('Smart links'),
    view='linking:smart_link_instances_for_document', args='object.pk'
)
link_smart_link_list = Link(
    permissions=(permission_smart_link_create,), text=_('Smart links'),
    view='linking:smart_link_list'
)
link_smart_link_setup = Link(
    icon='fa fa-link', permissions=(permission_smart_link_create,),
    text=_('Smart links'), view='linking:smart_link_list'
)
| true
| true
|
1c4807f83ad4b502ef47f691ad2b1873e34b1f90
| 2,506
|
py
|
Python
|
bot.py
|
chenx6/sec_bot
|
5ad3427ed62f6dd891bb03afc5b2ebf93ccbb625
|
[
"MIT"
] | 2
|
2021-02-03T05:36:05.000Z
|
2022-01-20T05:42:46.000Z
|
bot.py
|
chenx6/sec_bot
|
5ad3427ed62f6dd891bb03afc5b2ebf93ccbb625
|
[
"MIT"
] | null | null | null |
bot.py
|
chenx6/sec_bot
|
5ad3427ed62f6dd891bb03afc5b2ebf93ccbb625
|
[
"MIT"
] | null | null | null |
from typing import List
from aiocqhttp import CQHttp, Event
from quart import request
from schedule import every
from config import webhook_token, subscribes
from utils.limit_counter import LimitCounter
from utils.schedule_thread import run_continuously
from plugin import (silent, base_bot, anquanke_vuln, ctfhub, daily_push, help_menu,
whoami, rss, search, admin, unknown_message, lsp, weather, debian_pkg)
# Shared silencer plugin; also consulted directly in the handlers below.
silent_ = silent.Silent()
# Plugin chain, consulted in order; the first plugin whose match() accepts
# the message handles it. Unknown() presumably acts as the catch-all — it
# sits last; verify against plugin.unknown_message.
plugins: List[base_bot.BaseBot] = [
    silent_,
    anquanke_vuln.AnquankeVuln(),
    ctfhub.CTFHub(),
    daily_push.DailyPush(),
    rss.Rss(),
    help_menu.HelpMenu(),
    whoami.WhoAmI(),
    search.SearchBot(),
    weather.Weather(),
    debian_pkg.DebianPkgBot(),
    lsp.LSP(),
    admin.Admin(),
    unknown_message.Unknown()
]
bot = CQHttp()
logger = bot.logger
# Rate limiter for outgoing group replies; reset每 minute via reset_counter.
counter = LimitCounter()
@bot.on_message('group')
async def reply_at(event: Event):
    """
    Handle a group message: honour the silencer, enforce the send rate
    limit, then dispatch to the first matching plugin and send its reply.
    (Original docstring: reply to at-messages.)
    """
    if silent_.is_silent(event, event.message):
        return
    # Rate-limited: tell the sender to slow down instead of replying.
    if not counter.can_send():
        await bot.send(event, f'发送的太快了吧,{event.sender["nickname"]},让我缓缓(>﹏<)')
        return
    for plugin in plugins:
        if not event.message:
            break
        if plugin.match(event, event.message):
            try:
                reply_text = await plugin.reply(event)
                await bot.send(event, reply_text)
                counter.add_counter()
            except Exception as e:
                # A failing plugin is logged; the message is not retried.
                logger.error('Plugin error')
                logger.error(e)
            # Only the first matching plugin handles the message.
            break
@bot.server_app.route('/webhook')
async def webhook():
    """Authenticated webhook: push *message* to QQ group *group_id*.

    Query args: token (must equal config.webhook_token), group_id
    (integer), message. Returns the CQHTTP response on success, a JSON
    error with status 400 on bad input, or 500 on upstream failure.
    """
    token = request.args.get('token')
    group_id = request.args.get('group_id')
    message = request.args.get('message')
    if not token or token != webhook_token:
        return {"message": "token error"}, 400
    if not group_id or not message:
        return {"message": "error while missing argument"}, 400
    try:
        # A non-numeric group_id is a client error (400), not an unhandled
        # ValueError surfacing as a 500 stack trace.
        group_id = int(group_id)
    except ValueError:
        return {"message": "group_id must be an integer"}, 400
    try:
        response = await bot.send_group_msg(group_id=group_id,
                                            message=message)  # type: ignore
        return response
    except Exception as e:
        # Surface upstream CQHTTP failures as a 500 with the reason.
        return {"message": "Server error, " + str(e)}, 500
def reset_counter():
    # Scheduled every minute to reopen the outgoing-message budget.
    counter.reset_counter()
@bot.before_sending
async def can_send_word(event: Event, message, kwargs):
    """Before any send: drop the event entirely while silenced."""
    # NOTE(review): is_silent() is called with no arguments here but with
    # (event, message) in reply_at — confirm Silent.is_silent has defaults.
    if silent_.is_silent():
        event.clear()
# Register every configured subscription as a scheduled push job.
for sub in subscribes:
    sub.job.do(sub.send_message, bot=bot)
# Reopen the rate-limit budget once a minute.
every().minutes.do(reset_counter)
# Run the scheduler on a background thread, polling every 60 seconds.
run_continuously(60)
| 28.157303
| 90
| 0.640463
|
from typing import List
from aiocqhttp import CQHttp, Event
from quart import request
from schedule import every
from config import webhook_token, subscribes
from utils.limit_counter import LimitCounter
from utils.schedule_thread import run_continuously
from plugin import (silent, base_bot, anquanke_vuln, ctfhub, daily_push, help_menu,
whoami, rss, search, admin, unknown_message, lsp, weather, debian_pkg)
# Shared silencer plugin; also consulted directly in the handlers below.
silent_ = silent.Silent()
# Plugin chain, consulted in order; first match handles the message.
plugins: List[base_bot.BaseBot] = [
    silent_,
    anquanke_vuln.AnquankeVuln(),
    ctfhub.CTFHub(),
    daily_push.DailyPush(),
    rss.Rss(),
    help_menu.HelpMenu(),
    whoami.WhoAmI(),
    search.SearchBot(),
    weather.Weather(),
    debian_pkg.DebianPkgBot(),
    lsp.LSP(),
    admin.Admin(),
    unknown_message.Unknown()
]
bot = CQHttp()
logger = bot.logger
# Rate limiter for outgoing group replies; reset once a minute.
counter = LimitCounter()
@bot.on_message('group')
async def reply_at(event: Event):
    """Group-message handler: honour the silencer, enforce the rate limit,
    then dispatch to the first matching plugin and send its reply."""
    if silent_.is_silent(event, event.message):
        return
    if not counter.can_send():
        await bot.send(event, f'发送的太快了吧,{event.sender["nickname"]},让我缓缓(>﹏<)')
        return
    for plugin in plugins:
        if not event.message:
            break
        if plugin.match(event, event.message):
            try:
                reply_text = await plugin.reply(event)
                await bot.send(event, reply_text)
                counter.add_counter()
            except Exception as e:
                # A failing plugin is logged; the message is not retried.
                logger.error('Plugin error')
                logger.error(e)
            # Only the first matching plugin handles the message.
            break
@bot.server_app.route('/webhook')
async def webhook():
    """Authenticated webhook: forward *message* to QQ group *group_id*.
    Returns the CQHTTP response, or a JSON error with 400/500."""
    token = request.args.get('token')
    group_id = request.args.get('group_id')
    message = request.args.get('message')
    if not token or token != webhook_token:
        return {"message": "token error"}, 400
    if not group_id or not message:
        return {"message": "error while missing argument"}, 400
    # NOTE(review): a non-numeric group_id raises ValueError here and
    # surfaces as an unhandled 500 — consider validating to a 400.
    group_id = int(group_id)
    try:
        response = await bot.send_group_msg(group_id=group_id,
                                            message=message)
        return response
    except Exception as e:
        return {"message": "Server error, " + str(e)}, 500
def reset_counter():
    # Scheduled every minute to reopen the outgoing-message budget.
    counter.reset_counter()
@bot.before_sending
async def can_send_word(event: Event, message, kwargs):
    """Before any send: drop the event entirely while silenced."""
    # NOTE(review): is_silent() takes no args here but (event, message) in
    # reply_at — confirm Silent.is_silent has defaults.
    if silent_.is_silent():
        event.clear()
# Register every configured subscription as a scheduled push job.
for sub in subscribes:
    sub.job.do(sub.send_message, bot=bot)
# Reopen the rate-limit budget once a minute, on a background thread.
every().minutes.do(reset_counter)
run_continuously(60)
| true
| true
|
1c48084185e3f160708426d640c31a058f31937b
| 811
|
py
|
Python
|
jesse/indicators/rocp.py
|
The-Makers-of-things/jesse
|
df061ea21011a3c28f3359f421ec5594216fb708
|
[
"MIT"
] | null | null | null |
jesse/indicators/rocp.py
|
The-Makers-of-things/jesse
|
df061ea21011a3c28f3359f421ec5594216fb708
|
[
"MIT"
] | null | null | null |
jesse/indicators/rocp.py
|
The-Makers-of-things/jesse
|
df061ea21011a3c28f3359f421ec5594216fb708
|
[
"MIT"
] | null | null | null |
from typing import Union
import numpy as np
import talib
from jesse.helpers import get_candle_source
def rocp(candles: np.ndarray, period: int = 10, source_type: str = "close", sequential: bool = False) -> Union[
    float, np.ndarray]:
    """
    ROCP - Rate of change Percentage: (price-prevPrice)/prevPrice

    :param candles: np.ndarray
    :param period: int - default=10
    :param source_type: str - default: "close"
    :param sequential: bool - default=False

    :return: float | np.ndarray
    """
    # When only the latest value is wanted, restrict the computation to a
    # warm-up window of the most recent 240 candles.
    if not sequential and len(candles) > 240:
        candles = candles[-240:]
    prices = get_candle_source(candles, source_type=source_type)
    result = talib.ROCP(prices, timeperiod=period)
    if sequential:
        return result
    last = result[-1]
    return None if np.isnan(last) else last
| 26.16129
| 111
| 0.668311
|
from typing import Union
import numpy as np
import talib
from jesse.helpers import get_candle_source
def rocp(candles: np.ndarray, period: int = 10, source_type: str = "close", sequential: bool = False) -> Union[
    float, np.ndarray]:
    """ROCP - Rate of change Percentage: (price-prevPrice)/prevPrice.

    Returns the full TA-Lib series when sequential, otherwise the latest
    value (or None while still inside the indicator warm-up, i.e. NaN).
    """
    # Single-value mode only needs a 240-candle warm-up window.
    if not sequential and len(candles) > 240:
        candles = candles[-240:]
    source = get_candle_source(candles, source_type=source_type)
    res = talib.ROCP(source, timeperiod=period)
    if sequential:
        return res
    else:
        return None if np.isnan(res[-1]) else res[-1]
| true
| true
|
1c48092d6bf94bc4d8938233cb16fd7a0011d89c
| 4,396
|
py
|
Python
|
tests/test_cli.py
|
rddunphy/pwg
|
47ed13d3a8120e2c21e4ff28af08deeddbbb9d66
|
[
"MIT"
] | null | null | null |
tests/test_cli.py
|
rddunphy/pwg
|
47ed13d3a8120e2c21e4ff28af08deeddbbb9d66
|
[
"MIT"
] | null | null | null |
tests/test_cli.py
|
rddunphy/pwg
|
47ed13d3a8120e2c21e4ff28af08deeddbbb9d66
|
[
"MIT"
] | null | null | null |
from io import StringIO
from unittest import TestCase
from unittest.mock import patch
from generator.cli import create_parser, gen, confirm, munge, reset, add_chars, remove_chars, save, pronounceable, \
phrase
class CLITest(TestCase):
    """Behavioural tests for the CLI entry functions (confirm/gen/
    pronounceable/phrase), with stdin/stdout/clipboard patched out."""
    def setUp(self):
        self.parser = create_parser()
    @patch("builtins.input", side_effect=['n', 'N'])
    def test_confirm_no(self, _):
        # Both lowercase and uppercase 'n' mean "no".
        answer = confirm("yes?")
        self.assertFalse(answer)
        answer = confirm("yes?")
        self.assertFalse(answer)
    @patch("builtins.input", side_effect=['y', ''])
    def test_confirm_yes(self, _):
        # 'y' and an empty reply (the default) both mean "yes".
        answer = confirm("yes?")
        self.assertTrue(answer)
        answer = confirm("yes?")
        self.assertTrue(answer)
    @patch("sys.stdout", new_callable=StringIO)
    def test_gen(self, mock_stdout):
        # Pattern "xxx" yields a 3-character password on stdout.
        args = self.parser.parse_args(["-p", "xxx"])
        gen(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(len(result), 3)
    @patch("pyperclip.copy")
    @patch("sys.stdout", new_callable=StringIO)
    def test_gen_copy(self, mock_stdout, _):
        # With -c the password goes to the clipboard, not stdout.
        args = self.parser.parse_args(["-c", "-p", "xxx"])
        gen(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(result, "Password copied to clipboard.")
    @patch("sys.stdout", new_callable=StringIO)
    def test_pronounceable(self, mock_stdout):
        args = self.parser.parse_args(["pronounceable", "-l", "8"])
        pronounceable(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(len(result), 8)
    @patch("sys.stdout", new_callable=StringIO)
    def test_phrase(self, mock_stdout):
        # Pattern "wwavn" — presumably each token contributes one
        # capitalised word, hence five uppercase letters; see generator.cli.
        args = self.parser.parse_args(["phrase", "-p", "wwavn"])
        phrase(args)
        result = mock_stdout.getvalue().strip()
        upper_count = sum(1 for c in result if c.isupper())
        self.assertEqual(upper_count, 5)
class ParserTest(TestCase):
    """Argument-parser wiring tests: each subcommand maps to the right
    handler function and populates the expected namespace attributes."""
    def setUp(self):
        self.parser = create_parser()
    def test_no_args(self):
        # Bare invocation defaults to gen with the 'default' type.
        args = self.parser.parse_args([])
        self.assertEqual(args.func, gen)
        self.assertFalse(args.copy)
        self.assertIsNone(args.pattern)
        self.assertEqual(args.type, 'default')
        self.assertFalse(args.munge)
    def test_pattern_args(self):
        args = self.parser.parse_args(["-c", "-p", "xxx"])
        self.assertEqual(args.func, gen)
        self.assertTrue(args.copy)
        self.assertEqual(args.pattern, "xxx")
        self.assertEqual(args.type, 'default')
        self.assertFalse(args.munge)
    def test_type_args(self):
        args = self.parser.parse_args(["-m", "-t", "pin"])
        self.assertEqual(args.func, gen)
        self.assertFalse(args.copy)
        self.assertIsNone(args.pattern)
        self.assertEqual(args.type, 'pin')
        self.assertTrue(args.munge)
    def test_pronounceable_args(self):
        args = self.parser.parse_args(["pronounceable", "-m", "-c", "-l", "15"])
        self.assertEqual(args.func, pronounceable)
        self.assertTrue(args.copy)
        self.assertEqual(args.length, 15)
        self.assertTrue(args.munge)
    def test_phrase_args(self):
        args = self.parser.parse_args(["phrase", "-m", "-c", "-p", "nn"])
        self.assertEqual(args.func, phrase)
        self.assertTrue(args.copy)
        self.assertEqual(args.pattern, "nn")
        self.assertTrue(args.munge)
    def test_munge(self):
        args = self.parser.parse_args(["munge", "mypassword"])
        self.assertEqual(args.func, munge)
        self.assertEqual(args.string, "mypassword")
    def test_reset(self):
        args = self.parser.parse_args(["reset"])
        self.assertEqual(args.func, reset)
    def test_add_chars(self):
        args = self.parser.parse_args(["add_chars", "n", "xyz"])
        self.assertEqual(args.func, add_chars)
        self.assertEqual(args.cls, "n")
        self.assertEqual(args.chars, "xyz")
    def test_remove_chars(self):
        args = self.parser.parse_args(["remove_chars", "n", "xyz"])
        self.assertEqual(args.func, remove_chars)
        self.assertEqual(args.cls, "n")
        self.assertEqual(args.chars, "xyz")
    def test_save(self):
        args = self.parser.parse_args(["save", "mytype", "xxx"])
        self.assertEqual(args.func, save)
        self.assertEqual(args.name, "mytype")
        self.assertEqual(args.pattern, "xxx")
| 34.34375
| 116
| 0.628071
|
from io import StringIO
from unittest import TestCase
from unittest.mock import patch
from generator.cli import create_parser, gen, confirm, munge, reset, add_chars, remove_chars, save, pronounceable, \
phrase
class CLITest(TestCase):
    """Behavioural tests for the CLI entry functions, with stdin/stdout
    and the clipboard patched out."""
    def setUp(self):
        self.parser = create_parser()
    @patch("builtins.input", side_effect=['n', 'N'])
    def test_confirm_no(self, _):
        # Both lowercase and uppercase 'n' mean "no".
        answer = confirm("yes?")
        self.assertFalse(answer)
        answer = confirm("yes?")
        self.assertFalse(answer)
    @patch("builtins.input", side_effect=['y', ''])
    def test_confirm_yes(self, _):
        # 'y' and an empty reply (the default) both mean "yes".
        answer = confirm("yes?")
        self.assertTrue(answer)
        answer = confirm("yes?")
        self.assertTrue(answer)
    @patch("sys.stdout", new_callable=StringIO)
    def test_gen(self, mock_stdout):
        args = self.parser.parse_args(["-p", "xxx"])
        gen(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(len(result), 3)
    @patch("pyperclip.copy")
    @patch("sys.stdout", new_callable=StringIO)
    def test_gen_copy(self, mock_stdout, _):
        args = self.parser.parse_args(["-c", "-p", "xxx"])
        gen(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(result, "Password copied to clipboard.")
    @patch("sys.stdout", new_callable=StringIO)
    def test_pronounceable(self, mock_stdout):
        args = self.parser.parse_args(["pronounceable", "-l", "8"])
        pronounceable(args)
        result = mock_stdout.getvalue().strip()
        self.assertEqual(len(result), 8)
    @patch("sys.stdout", new_callable=StringIO)
    def test_phrase(self, mock_stdout):
        # Pattern "wwavn" — presumably five pattern tokens give five
        # capitalised words; verify against generator.cli.
        args = self.parser.parse_args(["phrase", "-p", "wwavn"])
        phrase(args)
        result = mock_stdout.getvalue().strip()
        upper_count = sum(1 for c in result if c.isupper())
        self.assertEqual(upper_count, 5)
class ParserTest(TestCase):
    """Argument-parser wiring tests: each subcommand maps to the right
    handler function and populates the expected namespace attributes."""
    def setUp(self):
        self.parser = create_parser()
    def test_no_args(self):
        # Bare invocation defaults to gen with the 'default' type.
        args = self.parser.parse_args([])
        self.assertEqual(args.func, gen)
        self.assertFalse(args.copy)
        self.assertIsNone(args.pattern)
        self.assertEqual(args.type, 'default')
        self.assertFalse(args.munge)
    def test_pattern_args(self):
        args = self.parser.parse_args(["-c", "-p", "xxx"])
        self.assertEqual(args.func, gen)
        self.assertTrue(args.copy)
        self.assertEqual(args.pattern, "xxx")
        self.assertEqual(args.type, 'default')
        self.assertFalse(args.munge)
    def test_type_args(self):
        args = self.parser.parse_args(["-m", "-t", "pin"])
        self.assertEqual(args.func, gen)
        self.assertFalse(args.copy)
        self.assertIsNone(args.pattern)
        self.assertEqual(args.type, 'pin')
        self.assertTrue(args.munge)
    def test_pronounceable_args(self):
        args = self.parser.parse_args(["pronounceable", "-m", "-c", "-l", "15"])
        self.assertEqual(args.func, pronounceable)
        self.assertTrue(args.copy)
        self.assertEqual(args.length, 15)
        self.assertTrue(args.munge)
    def test_phrase_args(self):
        args = self.parser.parse_args(["phrase", "-m", "-c", "-p", "nn"])
        self.assertEqual(args.func, phrase)
        self.assertTrue(args.copy)
        self.assertEqual(args.pattern, "nn")
        self.assertTrue(args.munge)
    def test_munge(self):
        args = self.parser.parse_args(["munge", "mypassword"])
        self.assertEqual(args.func, munge)
        self.assertEqual(args.string, "mypassword")
    def test_reset(self):
        args = self.parser.parse_args(["reset"])
        self.assertEqual(args.func, reset)
    def test_add_chars(self):
        args = self.parser.parse_args(["add_chars", "n", "xyz"])
        self.assertEqual(args.func, add_chars)
        self.assertEqual(args.cls, "n")
        self.assertEqual(args.chars, "xyz")
    def test_remove_chars(self):
        args = self.parser.parse_args(["remove_chars", "n", "xyz"])
        self.assertEqual(args.func, remove_chars)
        self.assertEqual(args.cls, "n")
        self.assertEqual(args.chars, "xyz")
    def test_save(self):
        args = self.parser.parse_args(["save", "mytype", "xxx"])
        self.assertEqual(args.func, save)
        self.assertEqual(args.name, "mytype")
        self.assertEqual(args.pattern, "xxx")
| true
| true
|
1c480a65642aabc05cce235a50ba92cd155f5aa5
| 3,905
|
py
|
Python
|
controllers/ayarlar_controller.py
|
pyproject23/kackisivar
|
752438c51d0d6145ce3a385ca18a471d8b7f3013
|
[
"MIT"
] | 5
|
2020-12-13T20:09:34.000Z
|
2021-01-05T16:17:01.000Z
|
controllers/ayarlar_controller.py
|
pyproject23/kackisivar
|
752438c51d0d6145ce3a385ca18a471d8b7f3013
|
[
"MIT"
] | null | null | null |
controllers/ayarlar_controller.py
|
pyproject23/kackisivar
|
752438c51d0d6145ce3a385ca18a471d8b7f3013
|
[
"MIT"
] | 8
|
2020-12-13T19:09:26.000Z
|
2020-12-13T21:15:45.000Z
|
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMessageBox
from views.ayarlar_form import Ui_AyarlarForm
from models.ayarlar import Ayarlar
from models.kullanici import Kullanici
# from datetime import time, datetime
class AyarlarForm(QtWidgets.QWidget, Ui_AyarlarForm):
    """Settings form: loads application settings into the widgets and
    writes edited values back through the Ayarlar model."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.kullanici = Kullanici()
        self.ayarlar = Ayarlar.ayarlari_getir()
        self.setupUi()
    def setupUi(self):
        """Wire up the save button and integer-only input validators, then
        populate the widgets."""
        super().setupUi(self)
        self.pushButton.clicked.connect(self.ayarlari_guncelle)
        self.lineEditUsbId.setValidator(QtGui.QIntValidator(0, 1, self))
        self.lineEditKisiLimiti.setValidator(QtGui.QIntValidator(0, 1000, self))
        self.lineEditSmtpPortNo.setValidator(QtGui.QIntValidator(0, 1000, self))
        self.verileri_doldur()
    def verileri_doldur(self):
        """Populate all widgets from the settings row.

        NOTE(review): self.ayarlar is accessed by numeric index (3=school
        name, 5=mail time "H:M", 6=usb id, 9..13=SMTP fields) — confirm the
        index->column mapping against the Ayarlar model's schema.
        """
        try:
            self.ayarlar = Ayarlar.ayarlari_getir()
            self.lineEditOkulAdi.setText(self.ayarlar[3])
            self.comboBoxGorevli.clear()
            kullanicilar = self.kullanici.verileri_getir()
            # Current responsible user first, then every other user.
            self.comboBoxGorevli.addItem(self.ayarlar[1], self.ayarlar[4])
            for k in kullanicilar:
                self.comboBoxGorevli.addItem(k[3], k[0])
            # Mail time is stored as "H:M"; split into QTime components.
            saat = str.split(self.ayarlar[5], ":")
            saat = [int(i) for i in saat]
            self.timeEditMailSaat.setTime(QtCore.QTime(*saat))
            self.lineEditUsbId.setText(str(self.ayarlar[6]))
            self.lineEditDemoVideo.setText(self.ayarlar[7])
            self.lineEditKisiLimiti.setText(str(self.ayarlar[8]))
            self.lineEditSmtpAdres.setText(self.ayarlar[9])
            self.lineEditSmtpKulAdi.setText(self.ayarlar[10])
            self.lineEditSmtpParola.setText(self.ayarlar[11])
            self.lineEditSmtpPortNo.setText(str(self.ayarlar[12]))
            self.checkBoxTLS.setChecked(self.ayarlar[13])
        except Exception as e:
            self.Mesaj(baslik="Hata", mesaj="Hata:" + str(e), ikon="hata")
    def ayarlari_guncelle(self):
        """Read the widgets back and persist them via Ayarlar.kaydet, then
        reload the form."""
        try:
            okul = str.strip(self.lineEditOkulAdi.text())
            gorevli_id = self.comboBoxGorevli.itemData(self.comboBoxGorevli.currentIndex())
            mail_saati = self.timeEditMailSaat.text()
            usb_id = str.strip(self.lineEditUsbId.text())
            demo_video = str.strip(self.lineEditDemoVideo.text())
            kisi_siniri = str.strip(self.lineEditKisiLimiti.text())
            smtp_server_adres = str.strip(self.lineEditSmtpAdres.text())
            smtp_kullanici_adi = str.strip(self.lineEditSmtpKulAdi.text())
            smtp_kullanici_parola = str.strip(self.lineEditSmtpParola.text())
            smtp_port_numarasi = str.strip(self.lineEditSmtpPortNo.text())
            if self.checkBoxTLS.isChecked():
                smtp_tls = 1
            else:
                smtp_tls = 0
            Ayarlar.kaydet(id=1, okul_adi=okul, gorevli_id=gorevli_id, mail_gonderme_saati=mail_saati, usb_id=usb_id, demo_video=demo_video, kisi_siniri=kisi_siniri, smtp_server_adres=smtp_server_adres, smtp_kullanici_adi=smtp_kullanici_adi, smtp_kullanici_parola=smtp_kullanici_parola, smtp_port_numarasi=smtp_port_numarasi, smtp_tls=smtp_tls)
            self.verileri_doldur()
            # print(smtp_tls)
            # print("güncelle")
        except Exception as e:
            # NOTE(review): the caught exception 'e' is not shown to the
            # user (unlike verileri_doldur) — consider surfacing str(e).
            self.Mesaj("Hata", "Kayıt işlemi gerçekleştirilemedi", "hata")
    def Mesaj(self, baslik="", mesaj="", ikon="bilgi"):
        """Show a modal message box; ikon is 'bilgi', 'uyari' or error."""
        msg1 = QMessageBox()
        if (ikon == "bilgi"):
            msg1.setIcon(QMessageBox.Information)
        elif(ikon == "uyari"):
            msg1.setIcon(QMessageBox.Warning)
        else:
            msg1.setIcon(QMessageBox.Critical)
        msg1.setStyleSheet("background:#28595e;")
        msg1.setWindowTitle(baslik)
        msg1.setText(mesaj)
        msg1.exec_()
| 47.048193
| 344
| 0.656338
|
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMessageBox
from views.ayarlar_form import Ui_AyarlarForm
from models.ayarlar import Ayarlar
from models.kullanici import Kullanici
class AyarlarForm(QtWidgets.QWidget, Ui_AyarlarForm):
    """Settings form: loads the persisted settings record into the widgets
    and writes edited values back through Ayarlar.kaydet."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.kullanici = Kullanici()
        # Cached settings record; fields are addressed by index below
        # (index meanings inferred from usage — confirm against the schema).
        self.ayarlar = Ayarlar.ayarlari_getir()
        self.setupUi()
    def setupUi(self):
        """Wire signals, restrict the numeric inputs, and populate the form."""
        super().setupUi(self)
        self.pushButton.clicked.connect(self.ayarlari_guncelle)
        # Integer-only inputs: usb id 0-1, person limit and SMTP port 0-1000.
        self.lineEditUsbId.setValidator(QtGui.QIntValidator(0, 1, self))
        self.lineEditKisiLimiti.setValidator(QtGui.QIntValidator(0, 1000, self))
        self.lineEditSmtpPortNo.setValidator(QtGui.QIntValidator(0, 1000, self))
        self.verileri_doldur()
    def verileri_doldur(self):
        """Reload the settings record and copy each field into its widget.

        Shows an error dialog (with the exception text) if anything fails.
        """
        try:
            self.ayarlar = Ayarlar.ayarlari_getir()
            self.lineEditOkulAdi.setText(self.ayarlar[3])
            self.comboBoxGorevli.clear()
            kullanicilar = self.kullanici.verileri_getir()
            # Stored selection first (display text ayarlar[1], item data
            # ayarlar[4]), then every user as (name k[3], id k[0]).
            self.comboBoxGorevli.addItem(self.ayarlar[1], self.ayarlar[4])
            for k in kullanicilar:
                self.comboBoxGorevli.addItem(k[3], k[0])
            # Mail time is stored as colon-separated text, e.g. "HH:MM".
            saat = str.split(self.ayarlar[5], ":")
            saat = [int(i) for i in saat]
            self.timeEditMailSaat.setTime(QtCore.QTime(*saat))
            self.lineEditUsbId.setText(str(self.ayarlar[6]))
            self.lineEditDemoVideo.setText(self.ayarlar[7])
            self.lineEditKisiLimiti.setText(str(self.ayarlar[8]))
            self.lineEditSmtpAdres.setText(self.ayarlar[9])
            self.lineEditSmtpKulAdi.setText(self.ayarlar[10])
            self.lineEditSmtpParola.setText(self.ayarlar[11])
            self.lineEditSmtpPortNo.setText(str(self.ayarlar[12]))
            self.checkBoxTLS.setChecked(self.ayarlar[13])
        except Exception as e:
            self.Mesaj(baslik="Hata", mesaj="Hata:" + str(e), ikon="hata")
    def ayarlari_guncelle(self):
        """Collect the edited settings from the form widgets and persist them.

        Saves through Ayarlar.kaydet (always row id=1, a single settings
        record), then reloads the form; failures show an error dialog.
        """
        try:
            okul = str.strip(self.lineEditOkulAdi.text())
            # itemData carries the user id attached to the combo entry.
            gorevli_id = self.comboBoxGorevli.itemData(self.comboBoxGorevli.currentIndex())
            mail_saati = self.timeEditMailSaat.text()
            usb_id = str.strip(self.lineEditUsbId.text())
            demo_video = str.strip(self.lineEditDemoVideo.text())
            kisi_siniri = str.strip(self.lineEditKisiLimiti.text())
            smtp_server_adres = str.strip(self.lineEditSmtpAdres.text())
            smtp_kullanici_adi = str.strip(self.lineEditSmtpKulAdi.text())
            smtp_kullanici_parola = str.strip(self.lineEditSmtpParola.text())
            smtp_port_numarasi = str.strip(self.lineEditSmtpPortNo.text())
            # TLS flag is persisted as 0/1 rather than a bool.
            if self.checkBoxTLS.isChecked():
                smtp_tls = 1
            else:
                smtp_tls = 0
            Ayarlar.kaydet(id=1, okul_adi=okul, gorevli_id=gorevli_id, mail_gonderme_saati=mail_saati, usb_id=usb_id, demo_video=demo_video, kisi_siniri=kisi_siniri, smtp_server_adres=smtp_server_adres, smtp_kullanici_adi=smtp_kullanici_adi, smtp_kullanici_parola=smtp_kullanici_parola, smtp_port_numarasi=smtp_port_numarasi, smtp_tls=smtp_tls)
            self.verileri_doldur()
        except Exception as e:
            self.Mesaj("Hata", "Kayıt işlemi gerçekleştirilemedi", "hata")
    def Mesaj(self, baslik="", mesaj="", ikon="bilgi"):
        """Display a modal message box; ikon: "bilgi" -> Information,
        "uyari" -> Warning, anything else -> Critical."""
        msg1 = QMessageBox()
        if (ikon == "bilgi"):
            msg1.setIcon(QMessageBox.Information)
        elif(ikon == "uyari"):
            msg1.setIcon(QMessageBox.Warning)
        else:
            msg1.setIcon(QMessageBox.Critical)
        msg1.setStyleSheet("background:#28595e;")
        msg1.setWindowTitle(baslik)
        msg1.setText(mesaj)
        msg1.exec_()
| true
| true
|
1c480b17ae493ad4fe4f5c5c467de99cb0d9fc59
| 1,192
|
py
|
Python
|
pyvisdk/do/updated_agent_being_restarted_event.py
|
Infinidat/pyvisdk
|
f2f4e5f50da16f659ccc1d84b6a00f397fa997f8
|
[
"MIT"
] | null | null | null |
pyvisdk/do/updated_agent_being_restarted_event.py
|
Infinidat/pyvisdk
|
f2f4e5f50da16f659ccc1d84b6a00f397fa997f8
|
[
"MIT"
] | null | null | null |
pyvisdk/do/updated_agent_being_restarted_event.py
|
Infinidat/pyvisdk
|
f2f4e5f50da16f659ccc1d84b6a00f397fa997f8
|
[
"MIT"
] | null | null | null |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def UpdatedAgentBeingRestartedEvent(vim, *args, **kwargs):
    '''This event records that the agent has been patched and will be restarted.'''

    obj = vim.client.factory.create('{urn:vim25}UpdatedAgentBeingRestartedEvent')

    # There are 4 required fields; count positional and keyword args together.
    # (The old message claimed "at least 5" and only reported len(args).)
    if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments got: %d' % (len(args) + len(kwargs)))

    required = [ 'chainId', 'createdTime', 'key', 'userName' ]
    optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
        'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]

    # Positional args fill the fields in declaration order, required first.
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    # Keyword args may set any known field; unknown names are rejected.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
| 35.058824
| 124
| 0.615772
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
| true
| true
|
1c480b184cf95bb9b7a3ca86aa9f6eb52d29969b
| 2,678
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20180601/get_virtual_network_gateway_advertised_routes.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180601/get_virtual_network_gateway_advertised_routes.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180601/get_virtual_network_gateway_advertised_routes.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetVirtualNetworkGatewayAdvertisedRoutesResult',
'AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult',
'get_virtual_network_gateway_advertised_routes',
]
@pulumi.output_type
class GetVirtualNetworkGatewayAdvertisedRoutesResult:
    """
    List of virtual network gateway routes
    """
    def __init__(__self__, value=None):
        # Accepts None or a list; anything else is a programming error.
        if value and not isinstance(value, list):
            raise TypeError("Expected argument 'value' to be a list")
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def value(self) -> Optional[Sequence['outputs.GatewayRouteResponse']]:
        """
        List of gateway routes
        """
        return pulumi.get(self, "value")
class AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult(GetVirtualNetworkGatewayAdvertisedRoutesResult):
    """Awaitable wrapper so the result can be used with `await`."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes __await__ a generator function
        # (required by the await protocol) that immediately returns the
        # plain result without ever suspending.
        if False:
            yield self
        return GetVirtualNetworkGatewayAdvertisedRoutesResult(
            value=self.value)
def get_virtual_network_gateway_advertised_routes(peer: Optional[str] = None,
                                                  resource_group_name: Optional[str] = None,
                                                  virtual_network_gateway_name: Optional[str] = None,
                                                  opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult:
    """
    List of virtual network gateway routes

    :param str peer: The IP address of the peer
    :param str resource_group_name: The name of the resource group.
    :param str virtual_network_gateway_name: The name of the virtual network gateway.
    :return: An awaitable result whose `value` is the list of gateway routes.
    """
    __args__ = dict()
    __args__['peer'] = peer
    __args__['resourceGroupName'] = resource_group_name
    __args__['virtualNetworkGatewayName'] = virtual_network_gateway_name
    # Ensure the invoke carries the SDK version even when no options are given.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:network/v20180601:getVirtualNetworkGatewayAdvertisedRoutes', __args__, opts=opts, typ=GetVirtualNetworkGatewayAdvertisedRoutesResult).value
    return AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult(
        value=__ret__.value)
| 38.257143
| 189
| 0.698282
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetVirtualNetworkGatewayAdvertisedRoutesResult',
'AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult',
'get_virtual_network_gateway_advertised_routes',
]
@pulumi.output_type
class GetVirtualNetworkGatewayAdvertisedRoutesResult:
def __init__(__self__, value=None):
if value and not isinstance(value, list):
raise TypeError("Expected argument 'value' to be a list")
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def value(self) -> Optional[Sequence['outputs.GatewayRouteResponse']]:
return pulumi.get(self, "value")
class AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult(GetVirtualNetworkGatewayAdvertisedRoutesResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetVirtualNetworkGatewayAdvertisedRoutesResult(
value=self.value)
def get_virtual_network_gateway_advertised_routes(peer: Optional[str] = None,
resource_group_name: Optional[str] = None,
virtual_network_gateway_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult:
__args__ = dict()
__args__['peer'] = peer
__args__['resourceGroupName'] = resource_group_name
__args__['virtualNetworkGatewayName'] = virtual_network_gateway_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20180601:getVirtualNetworkGatewayAdvertisedRoutes', __args__, opts=opts, typ=GetVirtualNetworkGatewayAdvertisedRoutesResult).value
return AwaitableGetVirtualNetworkGatewayAdvertisedRoutesResult(
value=__ret__.value)
| true
| true
|
1c480b25134b1e54200e0ddb780bd7bb0f122341
| 7,427
|
py
|
Python
|
tensorflow/contrib/cluster_resolver/python/training/cluster_resolver.py
|
tianyapiaozi/tensorflow
|
fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a
|
[
"Apache-2.0"
] | 71
|
2017-05-25T16:02:15.000Z
|
2021-06-09T16:08:08.000Z
|
tensorflow/contrib/cluster_resolver/python/training/cluster_resolver.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 133
|
2017-04-26T16:49:49.000Z
|
2019-10-15T11:39:26.000Z
|
tensorflow/contrib/cluster_resolver/python/training/cluster_resolver.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 31
|
2018-09-11T02:17:17.000Z
|
2021-12-15T10:33:35.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cluster Resolvers are used for dynamic cluster IP/hostname resolution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.training.server_lib import ClusterSpec
class ClusterResolver(object):
  """Abstract class for all implementations of ClusterResolvers.
  This defines the skeleton for all implementations of ClusterResolvers.
  ClusterResolvers are a way for TensorFlow to communicate with various cluster
  management systems (e.g. GCE, AWS, etc...).
  By letting TensorFlow communicate with these systems, we will be able to
  automatically discover and resolve IP addresses for various TensorFlow
  workers. This will eventually allow us to automatically recover from
  underlying machine failures and scale TensorFlow worker clusters up and down.
  """
  @abc.abstractmethod
  def cluster_spec(self):
    """Retrieve the current state of the cluster and returns a ClusterSpec.
    Returns:
      A ClusterSpec representing the state of the cluster at the moment this
      function is called.
    Implementors of this function must take care in ensuring that the
    ClusterSpec returned is up-to-date at the time of calling this function.
    This usually means retrieving the information from the underlying cluster
    management system every time this function is invoked and reconstructing
    a cluster_spec, rather than attempting to cache anything.
    """
    raise NotImplementedError(
        'cluster_spec is not implemented for {}.'.format(self))
  @abc.abstractmethod
  def master(self):
    """Returns the master address to use when creating a session.
    Returns:
      The address of the TensorFlow master, as a string. Subclasses must
      override this; the base implementation only raises.
    """
    raise NotImplementedError('master is not implemented for {}.'.format(self))
class SimpleClusterResolver(ClusterResolver):
  """Trivial ClusterResolver that wraps a fixed, pre-built ClusterSpec."""

  def __init__(self, cluster_spec, master=''):
    """Stores the given ClusterSpec and optional master address verbatim.

    Raises:
      TypeError: if cluster_spec is not a ClusterSpec, or master is not a str.
    """
    super(SimpleClusterResolver, self).__init__()

    if isinstance(cluster_spec, ClusterSpec):
      self._cluster_spec = cluster_spec
    else:
      raise TypeError('cluster_spec must be a ClusterSpec.')

    if isinstance(master, str):
      self._master = master
    else:
      raise TypeError('master must be a string.')

  def cluster_spec(self):
    """Hands back the ClusterSpec supplied at construction time."""
    return self._cluster_spec

  def master(self):
    """Hands back the master address supplied at construction time."""
    return self._master
class UnionClusterResolver(ClusterResolver):
  """Performs a union on underlying ClusterResolvers.
  This class performs a union given two or more existing ClusterResolvers. It
  merges the underlying ClusterResolvers, and returns one unified ClusterSpec
  when cluster_spec is called. The details of the merge function is
  documented in the cluster_spec function.
  """
  def __init__(self, *args):
    """Initializes a UnionClusterResolver with other ClusterResolvers.
    Args:
      *args: `ClusterResolver` objects to be unionized.
    Raises:
      TypeError: If any argument is not a subclass of `ClusterResolvers`.
      ValueError: If there are no arguments passed.
    """
    super(UnionClusterResolver, self).__init__()
    if not args:
      raise ValueError('At least one ClusterResolver is required.')
    for cluster_resolver in args:
      if not isinstance(cluster_resolver, ClusterResolver):
        raise TypeError('All arguments must be a sub-class of '
                        '`ClusterResolver.`')
    self._cluster_resolvers = args
  def cluster_spec(self):
    """Returns a union of all the ClusterSpecs from the ClusterResolvers.
    Returns:
      A ClusterSpec containing host information merged from all the underlying
      ClusterResolvers.
    Raises:
      KeyError: If there are conflicting keys detected when merging two or
      more dictionaries, this exception is raised.
    Note: If there are multiple ClusterResolvers exposing ClusterSpecs with the
    same job name, we will merge the list/dict of workers.
    If *all* underlying ClusterSpecs expose the set of workers as lists, we will
    concatenate the lists of workers, starting with the list of workers from
    the first ClusterResolver passed into the constructor.
    If *any* of the ClusterSpecs expose the set of workers as a dict, we will
    treat all the sets of workers as dicts (even if they are returned as lists)
    and will only merge them into a dict if there is no conflicting keys. If
    there is a conflicting key, we will raise a `KeyError`.
    """
    merged_cluster = {}
    # First pass: we figure out whether it is all lists for a particular job,
    # or whether there are dicts inside. Only the container type is chosen
    # here; no tasks are copied yet.
    for cluster_resolver in self._cluster_resolvers:
      cluster_spec = cluster_resolver.cluster_spec()
      cluster_dict = cluster_spec.as_dict()
      for job_name, tasks in cluster_dict.items():
        if job_name in merged_cluster:
          # If we see a dict, then we write a dict out regardless.
          if isinstance(tasks, dict):
            merged_cluster[job_name] = {}
        else:
          # We take whichever type is present.
          if isinstance(tasks, list):
            merged_cluster[job_name] = []
          else:
            merged_cluster[job_name] = {}
    # Second pass: we then do the merge as appropriate in merged_cluster[job].
    for cluster_resolver in self._cluster_resolvers:
      cluster_spec = cluster_resolver.cluster_spec()
      cluster_dict = cluster_spec.as_dict()
      for job_name, tasks in cluster_dict.items():
        if isinstance(merged_cluster[job_name], list):
          # We all have lists, we can just concatenate and be done.
          merged_cluster[job_name].extend(tasks)
        else:
          if isinstance(tasks, list):
            # We convert to a dictionary (task index -> task) if the type is
            # a list.
            task_dict = dict(zip(range(0, len(tasks)), tasks))
          else:
            # We can simply make a copy (for update) and be done.
            task_dict = tasks.copy()
          # We detect if there are duplicates, and raise an error if so.
          task_keys = set(task_dict)
          merged_keys = set(merged_cluster[job_name].keys())
          intersected_keys = task_keys.intersection(merged_keys)
          if intersected_keys:
            raise KeyError('Duplicate keys detected when merging two '
                           'ClusterSpecs: %s' % repr(intersected_keys))
          # We do the merge after all the processing.
          merged_cluster[job_name].update(task_dict)
    return ClusterSpec(merged_cluster)
  def master(self):
    """master returns the master address from the first cluster resolver."""
    return self._cluster_resolvers[0].master()
| 38.481865
| 80
| 0.706476
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.training.server_lib import ClusterSpec
class ClusterResolver(object):
@abc.abstractmethod
def cluster_spec(self):
raise NotImplementedError(
'cluster_spec is not implemented for {}.'.format(self))
@abc.abstractmethod
def master(self):
raise NotImplementedError('master is not implemented for {}.'.format(self))
class SimpleClusterResolver(ClusterResolver):
def __init__(self, cluster_spec, master=''):
super(SimpleClusterResolver, self).__init__()
if not isinstance(cluster_spec, ClusterSpec):
raise TypeError('cluster_spec must be a ClusterSpec.')
self._cluster_spec = cluster_spec
if not isinstance(master, str):
raise TypeError('master must be a string.')
self._master = master
def cluster_spec(self):
return self._cluster_spec
def master(self):
return self._master
class UnionClusterResolver(ClusterResolver):
def __init__(self, *args):
super(UnionClusterResolver, self).__init__()
if not args:
raise ValueError('At least one ClusterResolver is required.')
for cluster_resolver in args:
if not isinstance(cluster_resolver, ClusterResolver):
raise TypeError('All arguments must be a sub-class of '
'`ClusterResolver.`')
self._cluster_resolvers = args
def cluster_spec(self):
merged_cluster = {}
for cluster_resolver in self._cluster_resolvers:
cluster_spec = cluster_resolver.cluster_spec()
cluster_dict = cluster_spec.as_dict()
for job_name, tasks in cluster_dict.items():
if job_name in merged_cluster:
if isinstance(tasks, dict):
merged_cluster[job_name] = {}
else:
if isinstance(tasks, list):
merged_cluster[job_name] = []
else:
merged_cluster[job_name] = {}
for cluster_resolver in self._cluster_resolvers:
cluster_spec = cluster_resolver.cluster_spec()
cluster_dict = cluster_spec.as_dict()
for job_name, tasks in cluster_dict.items():
if isinstance(merged_cluster[job_name], list):
merged_cluster[job_name].extend(tasks)
else:
if isinstance(tasks, list):
task_dict = dict(zip(range(0, len(tasks)), tasks))
else:
task_dict = tasks.copy()
task_keys = set(task_dict)
merged_keys = set(merged_cluster[job_name].keys())
intersected_keys = task_keys.intersection(merged_keys)
if intersected_keys:
raise KeyError('Duplicate keys detected when merging two '
'ClusterSpecs: %s' % repr(intersected_keys))
merged_cluster[job_name].update(task_dict)
return ClusterSpec(merged_cluster)
def master(self):
return self._cluster_resolvers[0].master()
| true
| true
|
1c480b50f62091e40b64bb5483b4200ef19d237f
| 24
|
py
|
Python
|
api/__init__.py
|
hugofer93/aimo-api
|
fe3cc3f169f7a46d4ba68625a7936f37f55b1aad
|
[
"MIT"
] | null | null | null |
api/__init__.py
|
hugofer93/aimo-api
|
fe3cc3f169f7a46d4ba68625a7936f37f55b1aad
|
[
"MIT"
] | 10
|
2020-09-07T07:23:08.000Z
|
2022-03-02T05:32:10.000Z
|
api/__init__.py
|
hugofer93/aimo-api
|
fe3cc3f169f7a46d4ba68625a7936f37f55b1aad
|
[
"MIT"
] | null | null | null |
from api.app import app
| 12
| 23
| 0.791667
|
from api.app import app
| true
| true
|
1c480bf2b8efe1f035b8d1c81bfb21a1df0ca5f0
| 2,546
|
py
|
Python
|
tests/test_factory.py
|
unt-libraries/aubrey-transcription
|
0383d67a0ed3f3ddaa306edfb09b2da1364e4178
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_factory.py
|
unt-libraries/aubrey-transcription
|
0383d67a0ed3f3ddaa306edfb09b2da1364e4178
|
[
"BSD-3-Clause"
] | 8
|
2018-08-22T19:14:38.000Z
|
2019-11-22T17:12:56.000Z
|
tests/test_factory.py
|
unt-libraries/aubrey-transcription
|
0383d67a0ed3f3ddaa306edfb09b2da1364e4178
|
[
"BSD-3-Clause"
] | null | null | null |
import mock
import pytest
from aubrey_transcription import create_app, default_settings
@mock.patch('aubrey_transcription.os.makedirs')  # We don't want 'instance' dirs everywhere.
class TestCreateApp:
    """Tests for the create_app factory and its three-layer config precedence:
    defaults < instance settings.py < explicitly passed test_config."""
    @pytest.fixture(scope="session")
    def settings_file(self, tmpdir_factory):
        """Write a temporary instance settings.py that overrides every setting."""
        settings = (
            "PAIRTREE_BASE = '/different/path'"
            "\nTRANSCRIPTION_URL = 'http://someothersite.com'"
            "\nEXTENSIONS_META = {'txt': {'use': 'text', 'mimetype': 'text'}}"
            "\nFILENAME_PATTERN = 'some regex'"
        )
        fn = tmpdir_factory.mktemp('data').join('settings.py')
        fn.write(settings)
        return fn
    def test_makes_instance_dir(self, mock_makedirs):
        # The factory must create its instance directory on startup.
        app = create_app()
        mock_makedirs.assert_called_once_with(app.instance_path)
    def test_can_override_instance_dir(self, mock_makedirs):
        # An explicit instance_path is used instead of the default location.
        create_app(instance_path='/new/path')
        mock_makedirs.assert_called_once_with('/new/path')
    def test_default_settings(self, mock_makedirs):
        # Move instance path in case a settings file is already in the standard location.
        app = create_app(instance_path='/nothing/here')
        assert app.config['PAIRTREE_BASE'] == default_settings.PAIRTREE_BASE
        assert app.config['TRANSCRIPTION_URL'] == default_settings.TRANSCRIPTION_URL
        assert app.config['EXTENSIONS_META'] == default_settings.EXTENSIONS_META
        assert app.config['FILENAME_PATTERN'] == default_settings.FILENAME_PATTERN
    def test_instance_file_overrides_default_settings(self, mock_makedirs, settings_file):
        # Values from the instance settings.py win over the defaults.
        app = create_app(instance_path=settings_file.dirname)
        assert app.config['PAIRTREE_BASE'] == '/different/path'
        assert app.config['TRANSCRIPTION_URL'] == 'http://someothersite.com'
        assert app.config['EXTENSIONS_META'] == {'txt': {'use': 'text', 'mimetype': 'text'}}
        assert app.config['FILENAME_PATTERN'] == 'some regex'
    def test_settings_passed_in_overrides_instance_file(self, mock_makedirs, settings_file):
        # test_config is the highest-priority layer, above the instance file.
        app = create_app(
            test_config={'PAIRTREE_BASE': '/right/here', 'TRANSCRIPTION_URL': 'something.com',
                         'EXTENSIONS_META': {}, 'FILENAME_PATTERN': 'a pattern'},
            instance_path=settings_file.dirname
        )
        assert app.config['PAIRTREE_BASE'] == '/right/here'
        assert app.config['TRANSCRIPTION_URL'] == 'something.com'
        assert app.config['EXTENSIONS_META'] == {}
        assert app.config['FILENAME_PATTERN'] == 'a pattern'
| 47.148148
| 94
| 0.67989
|
import mock
import pytest
from aubrey_transcription import create_app, default_settings
@mock.patch('aubrey_transcription.os.makedirs')
class TestCreateApp:
@pytest.fixture(scope="session")
def settings_file(self, tmpdir_factory):
settings = (
"PAIRTREE_BASE = '/different/path'"
"\nTRANSCRIPTION_URL = 'http://someothersite.com'"
"\nEXTENSIONS_META = {'txt': {'use': 'text', 'mimetype': 'text'}}"
"\nFILENAME_PATTERN = 'some regex'"
)
fn = tmpdir_factory.mktemp('data').join('settings.py')
fn.write(settings)
return fn
def test_makes_instance_dir(self, mock_makedirs):
app = create_app()
mock_makedirs.assert_called_once_with(app.instance_path)
def test_can_override_instance_dir(self, mock_makedirs):
create_app(instance_path='/new/path')
mock_makedirs.assert_called_once_with('/new/path')
def test_default_settings(self, mock_makedirs):
# Move instance path in case a settings file is already in the standard location.
app = create_app(instance_path='/nothing/here')
assert app.config['PAIRTREE_BASE'] == default_settings.PAIRTREE_BASE
assert app.config['TRANSCRIPTION_URL'] == default_settings.TRANSCRIPTION_URL
assert app.config['EXTENSIONS_META'] == default_settings.EXTENSIONS_META
assert app.config['FILENAME_PATTERN'] == default_settings.FILENAME_PATTERN
def test_instance_file_overrides_default_settings(self, mock_makedirs, settings_file):
app = create_app(instance_path=settings_file.dirname)
assert app.config['PAIRTREE_BASE'] == '/different/path'
assert app.config['TRANSCRIPTION_URL'] == 'http://someothersite.com'
assert app.config['EXTENSIONS_META'] == {'txt': {'use': 'text', 'mimetype': 'text'}}
assert app.config['FILENAME_PATTERN'] == 'some regex'
def test_settings_passed_in_overrides_instance_file(self, mock_makedirs, settings_file):
app = create_app(
test_config={'PAIRTREE_BASE': '/right/here', 'TRANSCRIPTION_URL': 'something.com',
'EXTENSIONS_META': {}, 'FILENAME_PATTERN': 'a pattern'},
instance_path=settings_file.dirname
)
assert app.config['PAIRTREE_BASE'] == '/right/here'
assert app.config['TRANSCRIPTION_URL'] == 'something.com'
assert app.config['EXTENSIONS_META'] == {}
assert app.config['FILENAME_PATTERN'] == 'a pattern'
| true
| true
|
1c480c30b16efe1ba1ddd3320ec4bbc6d27e8a0b
| 644
|
py
|
Python
|
problems/cop/academic/CoinsGrid.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 28
|
2019-12-14T09:25:52.000Z
|
2022-03-24T08:15:13.000Z
|
problems/cop/academic/CoinsGrid.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 7
|
2020-04-15T11:02:07.000Z
|
2022-01-20T12:48:54.000Z
|
problems/cop/academic/CoinsGrid.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 3
|
2020-04-15T08:23:45.000Z
|
2021-12-07T14:02:28.000Z
|
"""
See 'Constraint Solving and Planning with Picat' (page 43)
From Tony Hurlimann, A coin puzzle, SVOR-contest 2007
Some data: (8,4) (8,5) (9,4) (10,4) (31,14)
Examples of Execution:
python3 CoinsGrid.py
python3 CoinsGrid.py -data=[10,4]
"""
from pycsp3 import *
# `data` is injected by the pycsp3 runner (-data=[n,c]); default is (8, 4).
n, c = data or (8, 4)

# x[i][j] is 1 if a coin is placed at row i and column j
x = VarArray(size=[n, n], dom={0, 1})

satisfy(
    # every row holds exactly c coins
    [Sum(x[i]) == c for i in range(n)],
    # every column holds exactly c coins
    [Sum(x[:, j]) == c for j in range(n)]
)

minimize(
    # total squared distance of the placed coins to the main diagonal
    Sum(x[i][j] * abs(i - j) ** 2 for i in range(n) for j in range(n))
)
""" Comments
1) there are other variants in Hurlimann's paper (TODO)
"""
| 19.515152
| 70
| 0.610248
|
from pycsp3 import *
n, c = data or (8, 4)
x = VarArray(size=[n, n], dom={0, 1})
satisfy(
[Sum(x[i]) == c for i in range(n)],
[Sum(x[:, j]) == c for j in range(n)]
)
minimize(
Sum(x[i][j] * abs(i - j) ** 2 for i in range(n) for j in range(n))
)
| true
| true
|
1c480c456a6f3aede1614e5edca57e8015b7bcc5
| 749
|
py
|
Python
|
test/ppm_to_challenge.py
|
mrwonko/pwb2015
|
9e2f82086a72c12150dcd2bd44b52dc7ebb429fc
|
[
"MIT"
] | null | null | null |
test/ppm_to_challenge.py
|
mrwonko/pwb2015
|
9e2f82086a72c12150dcd2bd44b52dc7ebb429fc
|
[
"MIT"
] | null | null | null |
test/ppm_to_challenge.py
|
mrwonko/pwb2015
|
9e2f82086a72c12150dcd2bd44b52dc7ebb429fc
|
[
"MIT"
] | null | null | null |
import sys

# Convert a plain-text PPM (P3) image into a grid of small color indices,
# written to "<input>.txt" as a Python-literal nested list (bottom row first).
with open(sys.argv[1]) as f:
    magic = f.readline()
    assert(magic == "P3\n")
    # Plain PPM allows '#' comments anywhere and arbitrary whitespace between
    # samples, so tokenize the remainder instead of assuming exactly one
    # comment line and one sample per line (the old restricted layout still
    # parses correctly).
    tokens = []
    for raw_line in f:
        tokens.extend(raw_line.split("#", 1)[0].split())
    w, h = int(tokens[0]), int(tokens[1])
    # tokens[2] is the maximum sample value; it is not needed for the output.
    samples = iter(tokens[3:])
    colors = {}  # RGB triple -> 1-based color index, in first-seen order
    grid = []
    for y in range(h):
        line = []
        for x in range(w):
            color = (int(next(samples)), int(next(samples)), int(next(samples)))
            if color not in colors:
                colors[color] = len(colors) + 1
            line.append(colors[color])
        # Prepend each row so the output grid is flipped vertically.
        grid = [line] + grid
with open(sys.argv[1] + ".txt", "w") as o:
    o.write(str(grid).replace(" ", ""))
    o.write("\n")
| 31.208333
| 73
| 0.459279
|
import sys
with open( sys.argv[ 1 ] ) as f:
magic = f.readline()
assert( magic == "P3\n" )
comment = f.readline()
assert( comment.startswith( "#" ) )
dimensions = f.readline()
w, h = map( int, dimensions.split() )
_ = f.readline()
colors = {}
grid = []
for y in range( h ):
line = []
for x in range( w ):
color = tuple( [ int( f.readline() ) for _ in range( 3 ) ] )
if color not in colors:
colors[ color ] = len( colors ) + 1
line.append( colors[ color ] )
grid = [line] + grid
with open( sys.argv[ 1 ] + ".txt", "w" ) as o:
o.write( str( grid ).replace( " ", "" ) )
o.write( "\n" )
| true
| true
|
1c480d61ea3390831e6d2b87dc17df6cf61992b3
| 3,911
|
py
|
Python
|
tapia/exercise5/ex5.py
|
appfs/appfs
|
8cbbfa0e40e4d4a75a498ce8dd894bb2fbc3a9e3
|
[
"MIT"
] | 11
|
2017-04-21T11:39:55.000Z
|
2022-02-11T20:25:18.000Z
|
tapia/exercise5/ex5.py
|
appfs/appfs
|
8cbbfa0e40e4d4a75a498ce8dd894bb2fbc3a9e3
|
[
"MIT"
] | 69
|
2017-04-26T09:30:38.000Z
|
2017-08-01T11:31:21.000Z
|
tapia/exercise5/ex5.py
|
appfs/appfs
|
8cbbfa0e40e4d4a75a498ce8dd894bb2fbc3a9e3
|
[
"MIT"
] | 53
|
2017-04-20T16:16:11.000Z
|
2017-07-19T12:53:01.000Z
|
import sys
import networkx as nx
def is_valid(line):
    """
    Checks if the content of an edge has a valid format.
    <vertex vertex weight>
    A valid line is not a '#' comment and holds exactly three
    whitespace-separated integer tokens.
    :param line: A line of the input text.
    :type: String
    :return: A list if edge is valid, None otherwise.
    """
    if line.startswith("#"):
        return None
    tokens = line.split()
    if len(tokens) != 3:
        return None
    for token in tokens:
        try:
            int(token)
        except ValueError:
            return None
    return tokens
def generate_edges(content):
    """
    Generator over the valid edges <vertex vertex weight> found in the
    raw input lines; invalid lines are silently skipped.
    :param content: The raw content of the input file.
    """
    for raw_line in content:
        parsed = is_valid(raw_line)
        if parsed is not None:
            yield parsed
def load_graph_data(content):
    """
    Builds an undirected weighted graph from the raw lines of the
    input file.
    :param content: The raw content of the input file.
    :return: An undirected Graph.
    """
    graph = nx.Graph()
    # generate_edges only yields valid 3-token edges, so unpack directly.
    for source, target, weight in generate_edges(content):
        graph.add_edge(source, target, weight=weight)
    return graph
def dijkstra(graph):
    """
    Shortest path algorithm. Returns a dictionary with the shortest distance from each
    node in the graph to the initial node '1'.
    :param graph: A Graph structure
    :return: Dictionary with <node: distance> where the distance is the shortest path to the initial node.
    """
    # NOTE(review): assumes graph.nodes() returns a list (networkx 1.x API,
    # needed for .index/indexing below) — confirm against the installed version.
    nodes = graph.nodes()
    initial_node_index = nodes.index('1')
    initial_node = nodes[initial_node_index]
    neighbors_initial_node = graph.neighbors(initial_node)
    distances = dict()
    # Seed tentative distances: direct edge weight for neighbors of '1',
    # 2000000000 acts as "infinity" for everything else.
    for node in nodes:
        if node in neighbors_initial_node:
            distances[node] = int(graph[initial_node][node]['weight'])
        else:
            distances[node] = 2000000000
    distances[initial_node] = 0
    visited = [initial_node]
    # Loop until every node has been visited; each round settles the
    # unvisited node with the smallest tentative distance and relaxes
    # the edges leaving it.
    while sorted(nodes) != sorted(visited):
        current_node = min_vertex(distances, visited)
        visited.append(current_node)
        for neighbor in graph.neighbors(current_node):
            distance_to_neighbor = int(graph[current_node][neighbor]['weight'])
            temp_dist = distances[current_node] + distance_to_neighbor
            if temp_dist < distances[neighbor]:
                distances[neighbor] = temp_dist
    return distances
def min_vertex(distances, visited):
    '''
    Returns the vector with the minimum distance which is not yet visited.
    Ties keep the earlier entry (strict "<" comparison); returns None when
    every node is visited or no unvisited distance is below 2000000000.
    :param distances: A dictionary with <node: distance> structure
    :param visited: A list with the visited nodes
    :return: The non-visited vertex with the minimum distance
    '''
    best_node = None
    best_distance = 2000000000
    for vertex, dist in distances.items():
        if vertex not in visited and dist < best_distance:
            best_distance = dist
            best_node = vertex
    return best_node
if __name__ == '__main__':
    filename = sys.argv[-1]
    # Use a context manager so the input file is closed deterministically
    # (the original left the handle open for the rest of the process).
    with open(filename) as content:
        graph = load_graph_data(content)
    distances = dijkstra(graph)
    # Find the vertex (other than the start vertex '1') with the smallest
    # shortest-path distance; ties go to the vertex that appears later in
    # the distances dict's insertion order.
    result_dist = 2000000000
    result_vertex = None
    for k, v in distances.items():
        if k == '1':
            continue
        if v < result_dist:
            result_dist = v
            result_vertex = k
        elif v == result_dist:
            key_list = list(distances.keys())
            index_result = key_list.index(result_vertex)
            index_current = key_list.index(k)
            if index_result < index_current:
                result_vertex = k
    print('RESULT VERTEX {result_vertex}'.format(result_vertex=result_vertex))
    print('RESULT DIST {result_dist}'.format(result_dist=result_dist))
| 26.605442
| 106
| 0.639223
|
import sys
import networkx as nx
def is_valid(line):
    """Parse one edge line; return its three tokens or None if malformed.

    A line is valid when it splits into exactly three whitespace-separated
    tokens, is not a ``#`` comment, and every token parses as an integer.
    The tokens are returned as strings, unconverted.
    """
    tokens = line.rsplit()
    if len(tokens) != 3 or not tokens or line.startswith("#"):
        return None
    for token in tokens:
        try:
            int(token)
        except ValueError:
            return None
    return tokens
def generate_edges(content):
    """Yield each valid, parsed edge from an iterable of raw text lines.

    Invalid lines (comments, wrong token count, non-integer tokens) are
    silently skipped; is_valid() does the actual validation.
    """
    for raw_line in content:
        edge = is_valid(raw_line)
        if edge is not None:
            yield edge
def load_graph_data(content):
    """Build an undirected networkx Graph from an iterable of edge lines.

    Each valid line "u v w" adds edge (u, v) with a 'weight' attribute.
    Node ids and weights stay as strings; callers convert weight with int().
    """
    G = nx.Graph()
    edges = generate_edges(content)
    for edge in edges:
        if edge:  # generate_edges only yields truthy edges; defensive re-check
            G.add_edge(edge[0], edge[1], weight=edge[2])
    return G
def dijkstra(graph):
    """Compute shortest-path distances from node '1' to every graph node.

    Classic Dijkstra: initialize neighbors of '1' with their edge weights,
    everything else with the sentinel 2000000000, then repeatedly finalize
    the closest unvisited node (min_vertex) and relax its outgoing edges.

    NOTE(review): this relies on ``graph.nodes()``/``graph.neighbors()``
    returning lists (``.index()`` call, repeated ``in`` tests) — that is the
    networkx 1.x API; on 2.x these are views/iterators. Confirm the pinned
    networkx version.
    """
    nodes = graph.nodes()
    initial_node_index = nodes.index('1')
    initial_node = nodes[initial_node_index]
    neighbors_initial_node = graph.neighbors(initial_node)
    distances = dict()
    for node in nodes:
        if node in neighbors_initial_node:
            distances[node] = int(graph[initial_node][node]['weight'])
        else:
            distances[node] = 2000000000  # sentinel: not reached yet
    distances[initial_node] = 0
    visited = [initial_node]
    while sorted(nodes) != sorted(visited):
        current_node = min_vertex(distances, visited)
        visited.append(current_node)
        for neighbor in graph.neighbors(current_node):
            distance_to_neighbor = int(graph[current_node][neighbor]['weight'])
            temp_dist = distances[current_node] + distance_to_neighbor
            if temp_dist < distances[neighbor]:
                distances[neighbor] = temp_dist  # relax the edge
    return distances
def min_vertex(distances, visited):
    """Return the not-yet-visited node with the smallest tentative distance.

    Returns None when all unvisited nodes are at the "unreached" sentinel
    (2000000000), because the comparison is strictly ``<``. Ties go to the
    first node seen, matching the original behavior.

    Fix: the original shadowed the function name with a local variable.
    """
    UNREACHED = 2000000000  # sentinel used by dijkstra() for unreached nodes
    best_node = None
    best_distance = UNREACHED
    for node, distance in distances.items():
        if node in visited:
            continue
        if distance < best_distance:
            best_distance = distance
            best_node = node
    return best_node
return min_vertex
if __name__ == '__main__':
    # Entry point: read an edge-list file (last CLI argument), run Dijkstra
    # from node '1', and report the farthest-by-shortest-path vertex.
    filename = sys.argv[-1]
    # Fix: close the input file deterministically (it was left open before).
    with open(filename) as content:
        graph = load_graph_data(content)
    distances = dijkstra(graph)
    result_dist = 2000000000
    result_vertex = None
    for node, dist in distances.items():
        if node == '1':
            continue  # skip the start node itself
        if dist < result_dist:
            result_dist = dist
            result_vertex = node
        elif dist == result_dist and result_vertex is not None:
            # Tie-break: keep whichever node appears later in insertion order.
            # Fix: guard against result_vertex being None (previously crashed
            # with ValueError if the first node seen tied at the sentinel).
            key_list = list(distances.keys())
            if key_list.index(result_vertex) < key_list.index(node):
                result_vertex = node
    print('RESULT VERTEX {result_vertex}'.format(result_vertex=result_vertex))
    print('RESULT DIST {result_dist}'.format(result_dist=result_dist))
print('RESULT DIST {result_dist}'.format(result_dist=result_dist))
| true
| true
|
1c480e0170646c335358d40a83a3d3909901aa65
| 18,084
|
py
|
Python
|
sphinx/util/logging.py
|
jessetan/sphinx
|
4cae0ecf8641551028b1a54168e49d52cb6bc7f3
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/logging.py
|
jessetan/sphinx
|
4cae0ecf8641551028b1a54168e49d52cb6bc7f3
|
[
"BSD-2-Clause"
] | null | null | null |
sphinx/util/logging.py
|
jessetan/sphinx
|
4cae0ecf8641551028b1a54168e49d52cb6bc7f3
|
[
"BSD-2-Clause"
] | null | null | null |
"""
sphinx.util.logging
~~~~~~~~~~~~~~~~~~~
Logging utility functions for Sphinx.
:copyright: Copyright 2007-2021 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import logging
import logging.handlers
from collections import defaultdict
from contextlib import contextmanager
from typing import IO, TYPE_CHECKING, Any, Dict, Generator, List, Tuple, Type, Union
from docutils import nodes
from docutils.nodes import Node
from docutils.utils import get_source_line
from sphinx.errors import SphinxWarning
from sphinx.util.console import colorize
if TYPE_CHECKING:
from sphinx.application import Sphinx
NAMESPACE = 'sphinx'  # all Sphinx loggers live under this logging namespace
VERBOSE = 15  # custom level between DEBUG (10) and INFO (20)

# Map Sphinx level names to numeric levels; unknown names fall back to
# WARNING via the defaultdict factory.
LEVEL_NAMES = defaultdict(lambda: logging.WARNING)  # type: Dict[str, int]
LEVEL_NAMES.update({
    'CRITICAL': logging.CRITICAL,
    'SEVERE': logging.CRITICAL,
    'ERROR': logging.ERROR,
    'WARNING': logging.WARNING,
    'INFO': logging.INFO,
    'VERBOSE': VERBOSE,
    'DEBUG': logging.DEBUG,
})

# Map the -v command-line count to a handler level; counts above 2 fall
# back to 0 (NOTSET, i.e. everything passes).
VERBOSITY_MAP = defaultdict(lambda: 0)  # type: Dict[int, int]
VERBOSITY_MAP.update({
    0: logging.INFO,
    1: VERBOSE,
    2: logging.DEBUG,
})

# Console color per level. The factory default is 'blue', but note that
# ColorizeFormatter uses .get(), which bypasses the factory for unknown
# levels (yielding None / no color).
COLOR_MAP = defaultdict(lambda: 'blue',
                        {
                            logging.ERROR: 'darkred',
                            logging.WARNING: 'red',
                            logging.DEBUG: 'darkgray'
                        })
def getLogger(name: str) -> "SphinxLoggerAdapter":
    """Return a :class:`SphinxLoggerAdapter` wrapping the named logger.

    The logger is placed under the ``sphinx.`` namespace so Sphinx output is
    independent of the root logger's configuration, and it is re-enabled in
    case a third-party extension or application disabled it.

    Example usage::

        >>> from sphinx.util import logging
        >>> logger = logging.getLogger(__name__)
        >>> logger.info('Hello, this is an extension!')
        Hello, this is an extension!
    """
    wrapped = logging.getLogger('%s.%s' % (NAMESPACE, name))
    # undo any ``disabled = True`` set elsewhere
    wrapped.disabled = False
    return SphinxLoggerAdapter(wrapped, {})
def convert_serializable(records: List[logging.LogRecord]) -> None:
    """Make each record picklable, in place.

    Renders %-style args into the message and drops them, and turns a
    docutils node stored as ``location`` into a plain "source:line" string.
    """
    for record in records:
        # freeze the rendered message; args may hold unpicklable objects
        record.msg = record.getMessage()
        record.args = ()
        location = getattr(record, 'location', None)
        if location is not None and isinstance(location, nodes.Node):
            record.location = get_node_location(location)  # type: ignore
class SphinxLogRecord(logging.LogRecord):
    """LogRecord that prepends an optional location and level prefix.

    ``location`` (a "source:line" string) and :attr:`prefix` are folded into
    the rendered message by :meth:`getMessage`.
    """

    prefix = ''
    location = None  # type: Any

    def getMessage(self) -> str:
        rendered = super().getMessage()
        where = getattr(self, 'location', None)
        if where:
            return '%s: %s%s' % (where, self.prefix, rendered)
        # avoid stacking the prefix if it is already present
        if self.prefix not in rendered:
            return self.prefix + rendered
        return rendered
class SphinxInfoLogRecord(SphinxLogRecord):
    """LogRecord for INFO-level messages; renders with no level prefix."""
    prefix = ''  # do not show any prefix for INFO messages
class SphinxWarningLogRecord(SphinxLogRecord):
    """LogRecord for warnings; renders with a ``WARNING:`` prefix."""
    prefix = 'WARNING: '
class SphinxLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter that understands Sphinx-specific keyword arguments.

    The keywords in :attr:`KEYWORDS` are moved into the record's ``extra``
    mapping by :meth:`process` so downstream filters and handlers can read
    them as record attributes.
    """

    KEYWORDS = ['type', 'subtype', 'location', 'nonl', 'color', 'once']

    def log(self, level: Union[int, str], msg: str, *args: Any, **kwargs: Any) -> None:
        """Log at either a numeric level or a Sphinx level name."""
        levelno = level if isinstance(level, int) else LEVEL_NAMES[level]
        super().log(levelno, msg, *args, **kwargs)

    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Log at the Sphinx-specific VERBOSE level."""
        self.log(VERBOSE, msg, *args, **kwargs)

    def process(self, msg: str, kwargs: Dict) -> Tuple[str, Dict]:  # type: ignore
        """Move recognized keywords from *kwargs* into ``kwargs['extra']``."""
        extra = kwargs.setdefault('extra', {})
        for keyword in self.KEYWORDS:
            if keyword not in kwargs:
                continue
            extra[keyword] = kwargs.pop(keyword)
        return msg, kwargs

    def handle(self, record: logging.LogRecord) -> None:
        """Hand *record* straight to the wrapped logger's handlers."""
        self.logger.handle(record)
class WarningStreamHandler(logging.StreamHandler):
    """StreamHandler for warnings.

    A distinct subclass so other code (pending_warnings, prefixed_warnings)
    can locate the warning stream among a logger's handlers via isinstance.
    """
    pass
class NewLineStreamHandler(logging.StreamHandler):
    """StreamHandler whose line terminator is controlled per record.

    A record carrying a truthy ``nonl`` attribute is emitted without a
    trailing newline; the terminator is restored to a newline afterwards
    even if emission fails.
    """

    def emit(self, record: logging.LogRecord) -> None:
        suppress_newline = getattr(record, 'nonl', False)
        try:
            self.acquire()
            if suppress_newline:
                self.terminator = ''
            super().emit(record)
        finally:
            # always restore the default terminator for the next record
            self.terminator = '\n'
            self.release()
class MemoryHandler(logging.handlers.BufferingHandler):
    """Handler that buffers every record until explicitly drained."""

    def __init__(self) -> None:
        super().__init__(-1)  # capacity is irrelevant: shouldFlush() is False

    def shouldFlush(self, record: logging.LogRecord) -> bool:
        """Never auto-flush; records are drained via flushTo()/clear()."""
        return False

    def flushTo(self, logger: logging.Logger) -> None:
        """Replay every buffered record through *logger*, then empty buffer."""
        self.acquire()
        try:
            for buffered in self.buffer:
                logger.handle(buffered)
            self.buffer = []  # type: List[logging.LogRecord]
        finally:
            self.release()

    def clear(self) -> List[logging.LogRecord]:
        """Drop the buffered records without emitting them; return them."""
        drained, self.buffer = self.buffer, []
        return drained
@contextmanager
def pending_warnings() -> Generator[logging.Handler, None, None]:
    """Contextmanager to pend logging warnings temporary.

    While active, WARNING-and-above records are buffered in a MemoryHandler
    instead of reaching the warning stream; on exit the original warning
    handlers are reattached and the buffer is replayed to them.
    Similar to :func:`pending_logging`.
    """
    logger = logging.getLogger(NAMESPACE)
    memhandler = MemoryHandler()
    memhandler.setLevel(logging.WARNING)
    try:
        handlers = []
        for handler in logger.handlers[:]:
            # detach only the warning stream handlers; INFO output continues
            if isinstance(handler, WarningStreamHandler):
                logger.removeHandler(handler)
                handlers.append(handler)
        logger.addHandler(memhandler)
        yield memhandler
    finally:
        logger.removeHandler(memhandler)
        # reattach handlers first so the replayed records reach them
        for handler in handlers:
            logger.addHandler(handler)
        memhandler.flushTo(logger)
@contextmanager
def suppress_logging() -> Generator[MemoryHandler, None, None]:
    """Contextmanager to suppress logging all logs temporary.

    All handlers are detached and replaced with a MemoryHandler; buffered
    records are NOT replayed on exit (use :func:`pending_logging` for that).

    For example::

        >>> with suppress_logging():
        >>>     logger.warning('Warning message!')  # suppressed
        >>>     some_long_process()
        >>>
    """
    logger = logging.getLogger(NAMESPACE)
    memhandler = MemoryHandler()
    try:
        handlers = []
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
            handlers.append(handler)
        logger.addHandler(memhandler)
        yield memhandler
    finally:
        logger.removeHandler(memhandler)
        for handler in handlers:
            logger.addHandler(handler)
@contextmanager
def pending_logging() -> Generator[MemoryHandler, None, None]:
    """Contextmanager to pend logging all logs temporary.

    Like :func:`suppress_logging`, but the buffered records are replayed to
    the restored handlers when the block exits.

    For example::

        >>> with pending_logging():
        >>>     logger.warning('Warning message!')  # not flushed yet
        >>>     some_long_process()
        >>>
        Warning message!  # the warning is flushed here
    """
    logger = logging.getLogger(NAMESPACE)
    try:
        with suppress_logging() as memhandler:
            yield memhandler
    finally:
        # handlers are already restored here, so the replay reaches them
        memhandler.flushTo(logger)
@contextmanager
def skip_warningiserror(skip: bool = True) -> Generator[None, None, None]:
    """contextmanager to skip WarningIsErrorFilter for a while.

    Installs a DisableWarningIsErrorFilter in front of every handler's
    filter chain so warnings emitted inside the block do not raise, then
    removes it again. With ``skip=False`` this is a no-op passthrough.
    """
    logger = logging.getLogger(NAMESPACE)
    if skip is False:
        yield
    else:
        try:
            disabler = DisableWarningIsErrorFilter()
            for handler in logger.handlers:
                # use internal method; filters.insert() directly to install disabler
                # before WarningIsErrorFilter
                handler.filters.insert(0, disabler)
            yield
        finally:
            for handler in logger.handlers:
                handler.removeFilter(disabler)
@contextmanager
def prefixed_warnings(prefix: str) -> Generator[None, None, None]:
    """Prepend prefix to all records for a while.

    For example::

        >>> with prefixed_warnings("prefix:"):
        >>>     logger.warning('Warning message!')  # => prefix: Warning message!

    Nested calls reuse the already-installed MessagePrefixFilter, swapping
    its prefix and restoring the previous one on exit.

    .. versionadded:: 2.0
    """
    logger = logging.getLogger(NAMESPACE)
    warning_handler = None
    for handler in logger.handlers:
        if isinstance(handler, WarningStreamHandler):
            warning_handler = handler
            break
    else:
        # warning stream not found: nothing to prefix, act as a no-op
        yield
        return
    prefix_filter = None
    for _filter in warning_handler.filters:
        if isinstance(_filter, MessagePrefixFilter):
            prefix_filter = _filter
            break
    if prefix_filter:
        # already prefixed (nested call): swap the prefix, restore on exit
        try:
            previous = prefix_filter.prefix
            prefix_filter.prefix = prefix
            yield
        finally:
            prefix_filter.prefix = previous
    else:
        # not prefixed yet: install a fresh filter, remove it on exit
        try:
            prefix_filter = MessagePrefixFilter(prefix)
            warning_handler.addFilter(prefix_filter)
            yield
        finally:
            warning_handler.removeFilter(prefix_filter)
class LogCollector:
    """Helper that captures log records emitted inside its context."""
    def __init__(self) -> None:
        self.logs = []  # type: List[logging.LogRecord]
    @contextmanager
    def collect(self) -> Generator[None, None, None]:
        """Capture all records emitted in the with-block into ``self.logs``."""
        with pending_logging() as memhandler:
            yield
        self.logs = memhandler.clear()
class InfoFilter(logging.Filter):
    """Let through only records below WARNING (DEBUG/VERBOSE/INFO).

    Attached to the status stream so warnings and errors are not duplicated
    there; those go to the warning stream instead.
    """
    def filter(self, record: logging.LogRecord) -> bool:
        # Idiom fix: return the boolean expression directly instead of an
        # if/else that returned literal True/False.
        return record.levelno < logging.WARNING
def is_suppressed_warning(type: str, subtype: str, suppress_warnings: List[str]) -> bool:
    """Return True if a warning of (*type*, *subtype*) is suppressed.

    Each entry of *suppress_warnings* is either ``"type"`` (suppress the
    whole type) or ``"type.subtype"``, where ``"type.*"`` matches every
    subtype of that type.
    """
    if type is None:
        return False
    for pattern in suppress_warnings:
        if '.' in pattern:
            target, sub = pattern.split('.', 1)
        else:
            target, sub = pattern, None
        if target != type:
            continue
        # bare target, wildcard, exact subtype, or no subtype on the record
        if subtype is None or sub is None or sub == subtype or sub == '*':
            return True
    return False
class WarningSuppressor(logging.Filter):
    """Filter that drops warnings matching the ``suppress_warnings`` config.

    Every warning that survives the filter increments ``app._warncount``.
    """

    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()

    def filter(self, record: logging.LogRecord) -> bool:
        warning_type = getattr(record, 'type', None)
        warning_subtype = getattr(record, 'subtype', None)
        try:
            suppressed = self.app.config.suppress_warnings
        except AttributeError:
            # config is not initialized yet (e.g. while evaluating conf.py)
            suppressed = []
        if is_suppressed_warning(warning_type, warning_subtype, suppressed):
            return False
        self.app._warncount += 1
        return True
class WarningIsErrorFilter(logging.Filter):
    """Raise exception if warning emitted.

    Installed on the warning handler so that when ``app.warningiserror`` is
    set, any warning record becomes a SphinxWarning raised at log time.
    """
    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()
    def filter(self, record: logging.LogRecord) -> bool:
        if getattr(record, 'skip_warningsiserror', False):
            # disabled by DisableWarningIsErrorFilter
            return True
        elif self.app.warningiserror:
            location = getattr(record, 'location', '')
            try:
                message = record.msg % record.args
            except (TypeError, ValueError):
                message = record.msg  # use record.msg itself
            if location:
                exc = SphinxWarning(location + ":" + str(message))
            else:
                exc = SphinxWarning(message)
            if record.exc_info is not None:
                # chain the original traceback as the cause
                raise exc from record.exc_info[1]
            else:
                raise exc
        else:
            return True
class DisableWarningIsErrorFilter(logging.Filter):
    """Mark records so WarningIsErrorFilter lets them pass unconditionally."""

    def filter(self, record: logging.LogRecord) -> bool:
        # WarningIsErrorFilter checks this attribute before raising.
        setattr(record, 'skip_warningsiserror', True)
        return True
class MessagePrefixFilter(logging.Filter):
    """Filter that prepends a fixed prefix to every record's message."""

    def __init__(self, prefix: str) -> None:
        self.prefix = prefix
        super().__init__()

    def filter(self, record: logging.LogRecord) -> bool:
        if not self.prefix:
            return True  # empty prefix: leave the message untouched
        record.msg = self.prefix + ' ' + record.msg
        return True
class OnceFilter(logging.Filter):
    """Filter that emits a given (message, args) pair only once.

    Only records carrying a truthy ``once`` attribute are deduplicated;
    everything else always passes.
    """

    def __init__(self, name: str = '') -> None:
        super().__init__(name)
        self.messages = {}  # type: Dict[str, List]

    def filter(self, record: logging.LogRecord) -> bool:
        if not getattr(record, 'once', ''):
            return True  # not marked "once": always pass
        seen_args = self.messages.setdefault(record.msg, [])
        if record.args in seen_args:
            return False  # this exact message+args was already emitted
        seen_args.append(record.args)
        return True
class SphinxLogRecordTranslator(logging.Filter):
    """Converts a log record to one Sphinx expects

    * Make a instance of SphinxLogRecord
    * docname to path if location given

    Subclasses set :attr:`LogRecordClass` to choose the record flavor.
    """
    LogRecordClass = None  # type: Type[logging.LogRecord]
    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()
    def filter(self, record: SphinxWarningLogRecord) -> bool:  # type: ignore
        if isinstance(record, logging.LogRecord):
            # force subclassing to handle location
            record.__class__ = self.LogRecordClass  # type: ignore
        location = getattr(record, 'location', None)
        if isinstance(location, tuple):
            # (docname, lineno) pair -> "path:lineno" / "path" / nothing
            docname, lineno = location
            if docname and lineno:
                record.location = '%s:%s' % (self.app.env.doc2path(docname), lineno)
            elif docname:
                record.location = '%s' % self.app.env.doc2path(docname)
            else:
                record.location = None
        elif isinstance(location, nodes.Node):
            record.location = get_node_location(location)
        elif location and ':' not in location:
            # bare docname string; strings containing ':' are assumed to
            # already be "file:line" style and are left as-is
            record.location = '%s' % self.app.env.doc2path(location)
        return True
class InfoLogRecordTranslator(SphinxLogRecordTranslator):
    """LogRecordTranslator for INFO level log records."""
    LogRecordClass = SphinxInfoLogRecord
class WarningLogRecordTranslator(SphinxLogRecordTranslator):
    """LogRecordTranslator for WARNING level log records."""
    LogRecordClass = SphinxWarningLogRecord
def get_node_location(node: Node) -> Union[str, None]:
    """Return a ``source:line`` style location string for *node*.

    Falls back to partial forms ("source:" or "<unknown>:line") when only one
    half is known, and returns None when neither is available.

    Fix: the return annotation was ``-> str`` although the function can
    return None; corrected to ``Union[str, None]``.
    """
    (source, line) = get_source_line(node)
    if source and line:
        return "%s:%s" % (source, line)
    elif source:
        return "%s:" % source
    elif line:
        return "<unknown>:%s" % line
    else:
        return None
class ColorizeFormatter(logging.Formatter):
    """Formatter that wraps the formatted message in a console color.

    An explicit ``color`` attribute on the record wins; otherwise the color
    comes from the record's level via COLOR_MAP.
    """

    def format(self, record: logging.LogRecord) -> str:
        rendered = super().format(record)
        color = getattr(record, 'color', None)
        if color is None:
            # .get() (not subscription) so unknown levels yield None and
            # the message is left uncolored
            color = COLOR_MAP.get(record.levelno)
        if not color:
            return rendered
        return colorize(color, rendered)
class SafeEncodingWriter:
    """Stream wrapper that degrades gracefully on UnicodeEncodeError."""

    def __init__(self, stream: IO) -> None:
        self.stream = stream
        # streams without an encoding attribute are treated as ascii
        self.encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'

    def write(self, data: str) -> None:
        try:
            self.stream.write(data)
        except UnicodeEncodeError:
            # Replace unencodable characters instead of crashing: encode with
            # 'replace' and decode back so the stream still receives str.
            fallback = data.encode(self.encoding, 'replace').decode(self.encoding)
            self.stream.write(fallback)

    def flush(self) -> None:
        if hasattr(self.stream, 'flush'):
            self.stream.flush()
class LastMessagesWriter:
    """Stream-like writer that records status output on ``app.messagelog``.

    NOTE(review): the historical "memories last 10 messages" wording is not
    what this class does — it appends every chunk; any bounding of the log
    presumably happens in ``app.messagelog`` itself (confirm its type).
    """
    def __init__(self, app: "Sphinx", stream: IO) -> None:
        # *stream* is accepted for stream-wrapper interface parity but unused
        self.app = app
    def write(self, data: str) -> None:
        self.app.messagelog.append(data)
def setup(app: "Sphinx", status: IO, warning: IO) -> None:
    """Setup root logger for Sphinx

    Installs three handlers on the ``sphinx`` logger: the status stream
    (sub-WARNING records), the warning stream (WARNING+ with suppression,
    once-filtering and warning-is-error), and an in-memory message log.
    """
    logger = logging.getLogger(NAMESPACE)
    logger.setLevel(logging.DEBUG)  # per-handler levels do the real filtering
    logger.propagate = False  # keep Sphinx output away from the root logger
    # clear all handlers
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)
    info_handler = NewLineStreamHandler(SafeEncodingWriter(status))  # type: ignore
    info_handler.addFilter(InfoFilter())
    info_handler.addFilter(InfoLogRecordTranslator(app))
    info_handler.setLevel(VERBOSITY_MAP[app.verbosity])
    info_handler.setFormatter(ColorizeFormatter())
    warning_handler = WarningStreamHandler(SafeEncodingWriter(warning))  # type: ignore
    warning_handler.addFilter(WarningSuppressor(app))
    warning_handler.addFilter(WarningLogRecordTranslator(app))
    warning_handler.addFilter(WarningIsErrorFilter(app))
    warning_handler.addFilter(OnceFilter())
    warning_handler.setLevel(logging.WARNING)
    warning_handler.setFormatter(ColorizeFormatter())
    messagelog_handler = logging.StreamHandler(LastMessagesWriter(app, status))  # type: ignore
    messagelog_handler.addFilter(InfoFilter())
    messagelog_handler.setLevel(VERBOSITY_MAP[app.verbosity])
    messagelog_handler.setFormatter(ColorizeFormatter())
    logger.addHandler(info_handler)
    logger.addHandler(warning_handler)
    logger.addHandler(messagelog_handler)
| 30.444444
| 95
| 0.626742
|
import logging
import logging.handlers
from collections import defaultdict
from contextlib import contextmanager
from typing import IO, TYPE_CHECKING, Any, Dict, Generator, List, Tuple, Type, Union
from docutils import nodes
from docutils.nodes import Node
from docutils.utils import get_source_line
from sphinx.errors import SphinxWarning
from sphinx.util.console import colorize
if TYPE_CHECKING:
from sphinx.application import Sphinx
NAMESPACE = 'sphinx'  # all Sphinx loggers live under this logging namespace
VERBOSE = 15  # custom level between DEBUG (10) and INFO (20)
# Sphinx level names -> numeric levels; unknown names default to WARNING.
LEVEL_NAMES = defaultdict(lambda: logging.WARNING)  # type: Dict[str, int]
LEVEL_NAMES.update({
    'CRITICAL': logging.CRITICAL,
    'SEVERE': logging.CRITICAL,
    'ERROR': logging.ERROR,
    'WARNING': logging.WARNING,
    'INFO': logging.INFO,
    'VERBOSE': VERBOSE,
    'DEBUG': logging.DEBUG,
})
# -v count -> handler level; counts above 2 fall back to 0 (NOTSET).
VERBOSITY_MAP = defaultdict(lambda: 0)  # type: Dict[int, int]
VERBOSITY_MAP.update({
    0: logging.INFO,
    1: VERBOSE,
    2: logging.DEBUG,
})
# Console color per level; ColorizeFormatter uses .get(), bypassing the
# 'blue' defaultdict factory for unknown levels.
COLOR_MAP = defaultdict(lambda: 'blue',
                        {
                            logging.ERROR: 'darkred',
                            logging.WARNING: 'red',
                            logging.DEBUG: 'darkgray'
                        })
def getLogger(name: str) -> "SphinxLoggerAdapter":
    """Return a SphinxLoggerAdapter wrapping the named ``sphinx.*`` logger.

    Placing loggers under the ``sphinx.`` namespace keeps their output
    independent of root-logger configuration; the logger is also re-enabled
    in case a third party disabled it.
    """
    logger = logging.getLogger(NAMESPACE + '.' + name)
    logger.disabled = False  # undo any ``disabled = True`` set elsewhere
    return SphinxLoggerAdapter(logger, {})
def convert_serializable(records: List[logging.LogRecord]) -> None:
    """Make each record picklable in place.

    Renders %-style args into the message (then drops them) and turns a
    docutils node stored as ``location`` into a plain location string.
    """
    for r in records:
        r.msg = r.getMessage()  # freeze the rendered message
        r.args = ()
        location = getattr(r, 'location', None)
        if isinstance(location, nodes.Node):
            r.location = get_node_location(location)
class SphinxLogRecord(logging.LogRecord):
    """LogRecord that prepends an optional location and level prefix."""
    prefix = ''  # level-specific prefix, set by subclasses
    location = None  # "source:line" string, attached by translators
    def getMessage(self) -> str:
        """Render the message with location and prefix folded in."""
        message = super().getMessage()
        location = getattr(self, 'location', None)
        if location:
            message = '%s: %s%s' % (location, self.prefix, message)
        elif self.prefix not in message:
            # avoid stacking the prefix if already present
            message = self.prefix + message
        return message
class SphinxInfoLogRecord(SphinxLogRecord):
    """LogRecord for INFO-level messages; renders with no level prefix."""
    prefix = ''  # INFO messages carry no prefix
class SphinxWarningLogRecord(SphinxLogRecord):
    """LogRecord for warnings; renders with a ``WARNING:`` prefix."""
    prefix = 'WARNING: '
class SphinxLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter allowing Sphinx-specific keyword arguments.

    The keywords in KEYWORDS are moved into the record's ``extra`` mapping
    so downstream filters/handlers can read them as record attributes.
    """
    KEYWORDS = ['type', 'subtype', 'location', 'nonl', 'color', 'once']
    def log(self, level: Union[int, str], msg: str, *args: Any, **kwargs: Any) -> None:
        """Log at either a numeric level or a Sphinx level name."""
        if isinstance(level, int):
            super().log(level, msg, *args, **kwargs)
        else:
            levelno = LEVEL_NAMES[level]
            super().log(levelno, msg, *args, **kwargs)
    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
        """Log at the Sphinx-specific VERBOSE level."""
        self.log(VERBOSE, msg, *args, **kwargs)
    def process(self, msg: str, kwargs: Dict) -> Tuple[str, Dict]:
        """Move recognized keywords from *kwargs* into ``kwargs['extra']``."""
        extra = kwargs.setdefault('extra', {})
        for keyword in self.KEYWORDS:
            if keyword in kwargs:
                extra[keyword] = kwargs.pop(keyword)
        return msg, kwargs
    def handle(self, record: logging.LogRecord) -> None:
        """Hand *record* straight to the wrapped logger's handlers."""
        self.logger.handle(record)
class WarningStreamHandler(logging.StreamHandler):
    """StreamHandler for warnings; a distinct subclass so other code can
    find the warning stream among handlers via isinstance checks."""
    pass
class NewLineStreamHandler(logging.StreamHandler):
    """StreamHandler that drops the trailing newline for ``nonl`` records."""
    def emit(self, record: logging.LogRecord) -> None:
        try:
            self.acquire()
            if getattr(record, 'nonl', False):
                # skip appending the terminator for this record only
                self.terminator = ''
            super().emit(record)
        finally:
            # always restore the default terminator for the next record
            self.terminator = '\n'
            self.release()
class MemoryHandler(logging.handlers.BufferingHandler):
    """Handler that buffers every record until explicitly drained."""

    def __init__(self) -> None:
        super().__init__(-1)  # capacity is irrelevant: shouldFlush() is False

    def shouldFlush(self, record: logging.LogRecord) -> bool:
        """Never auto-flush; records are drained via flushTo()/clear()."""
        return False

    def flushTo(self, logger: logging.Logger) -> None:
        """Replay every buffered record through *logger*, then empty buffer."""
        self.acquire()
        try:
            for buffered in self.buffer:
                logger.handle(buffered)
            self.buffer = []
        finally:
            self.release()

    def clear(self) -> List[logging.LogRecord]:
        """Drop the buffered records without emitting them; return them."""
        drained, self.buffer = self.buffer, []
        return drained
@contextmanager
def pending_warnings() -> Generator[logging.Handler, None, None]:
    """Buffer WARNING+ records while active; replay them to the restored
    warning handlers on exit. Only WarningStreamHandlers are detached, so
    INFO output continues normally."""
    logger = logging.getLogger(NAMESPACE)
    memhandler = MemoryHandler()
    memhandler.setLevel(logging.WARNING)
    try:
        handlers = []
        for handler in logger.handlers[:]:
            if isinstance(handler, WarningStreamHandler):
                logger.removeHandler(handler)
                handlers.append(handler)
        logger.addHandler(memhandler)
        yield memhandler
    finally:
        logger.removeHandler(memhandler)
        # reattach handlers first so the replayed records reach them
        for handler in handlers:
            logger.addHandler(handler)
        memhandler.flushTo(logger)
@contextmanager
def suppress_logging() -> Generator[MemoryHandler, None, None]:
    """Divert ALL records into a MemoryHandler while active; the buffer is
    NOT replayed on exit (pending_logging() does that)."""
    logger = logging.getLogger(NAMESPACE)
    memhandler = MemoryHandler()
    try:
        handlers = []
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
            handlers.append(handler)
        logger.addHandler(memhandler)
        yield memhandler
    finally:
        logger.removeHandler(memhandler)
        for handler in handlers:
            logger.addHandler(handler)
@contextmanager
def pending_logging() -> Generator[MemoryHandler, None, None]:
    """Like suppress_logging(), but replay the buffered records to the
    restored handlers when the block exits."""
    logger = logging.getLogger(NAMESPACE)
    try:
        with suppress_logging() as memhandler:
            yield memhandler
    finally:
        # handlers are restored at this point, so the replay reaches them
        memhandler.flushTo(logger)
@contextmanager
def skip_warningiserror(skip: bool = True) -> Generator[None, None, None]:
    """Temporarily disable WarningIsErrorFilter by inserting a disabler
    filter ahead of every handler's filter chain. ``skip=False`` is a
    passthrough no-op."""
    logger = logging.getLogger(NAMESPACE)
    if skip is False:
        yield
    else:
        try:
            disabler = DisableWarningIsErrorFilter()
            for handler in logger.handlers:
                # insert directly so the disabler runs before
                # WarningIsErrorFilter
                handler.filters.insert(0, disabler)
            yield
        finally:
            for handler in logger.handlers:
                handler.removeFilter(disabler)
@contextmanager
def prefixed_warnings(prefix: str) -> Generator[None, None, None]:
    """Prepend *prefix* to all warning records while active.

    Nested calls reuse the installed MessagePrefixFilter, swapping its
    prefix and restoring the previous one on exit. A no-op when no warning
    stream handler is installed."""
    logger = logging.getLogger(NAMESPACE)
    warning_handler = None
    for handler in logger.handlers:
        if isinstance(handler, WarningStreamHandler):
            warning_handler = handler
            break
    else:
        # warning stream not found: nothing to prefix
        yield
        return
    prefix_filter = None
    for _filter in warning_handler.filters:
        if isinstance(_filter, MessagePrefixFilter):
            prefix_filter = _filter
            break
    if prefix_filter:
        # already prefixed (nested call): swap, restore on exit
        try:
            previous = prefix_filter.prefix
            prefix_filter.prefix = prefix
            yield
        finally:
            prefix_filter.prefix = previous
    else:
        # not prefixed yet: install a fresh filter, remove on exit
        try:
            prefix_filter = MessagePrefixFilter(prefix)
            warning_handler.addFilter(prefix_filter)
            yield
        finally:
            warning_handler.removeFilter(prefix_filter)
class LogCollector:
    """Helper that captures log records emitted inside its context."""
    def __init__(self) -> None:
        self.logs = []  # records captured by the last collect() block
    @contextmanager
    def collect(self) -> Generator[None, None, None]:
        """Capture all records emitted in the with-block into ``self.logs``."""
        with pending_logging() as memhandler:
            yield
        self.logs = memhandler.clear()
class InfoFilter(logging.Filter):
    """Let through only records below WARNING (DEBUG/VERBOSE/INFO).

    Attached to the status stream so warnings and errors are not duplicated
    there; those go to the warning stream instead.
    """
    def filter(self, record: logging.LogRecord) -> bool:
        # Idiom fix: return the boolean expression directly instead of an
        # if/else that returned literal True/False.
        return record.levelno < logging.WARNING
def is_suppressed_warning(type: str, subtype: str, suppress_warnings: List[str]) -> bool:
    """Return True if a warning of (*type*, *subtype*) should be suppressed.

    Entries of *suppress_warnings* are ``"type"`` or ``"type.subtype"``;
    ``"type.*"`` matches every subtype of that type.
    """
    if type is None:
        return False
    for pattern in suppress_warnings:
        if '.' in pattern:
            target, sub = pattern.split('.', 1)
        else:
            target, sub = pattern, None
        if target != type:
            continue
        # bare target, wildcard, exact subtype, or no subtype on the record
        if subtype is None or sub is None or sub == subtype or sub == '*':
            return True
    return False
class WarningSuppressor(logging.Filter):
    """Filter that drops warnings matching the ``suppress_warnings`` config;
    every surviving warning increments ``app._warncount``."""
    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()
    def filter(self, record: logging.LogRecord) -> bool:
        type = getattr(record, 'type', None)
        subtype = getattr(record, 'subtype', None)
        try:
            suppress_warnings = self.app.config.suppress_warnings
        except AttributeError:
            # config is not initialized yet (e.g. while evaluating conf.py)
            suppress_warnings = []
        if is_suppressed_warning(type, subtype, suppress_warnings):
            return False
        else:
            self.app._warncount += 1
            return True
class WarningIsErrorFilter(logging.Filter):
    """Raise SphinxWarning instead of emitting when ``app.warningiserror``
    is set, unless the record was marked by DisableWarningIsErrorFilter."""
    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()
    def filter(self, record: logging.LogRecord) -> bool:
        if getattr(record, 'skip_warningsiserror', False):
            # disabled by DisableWarningIsErrorFilter
            return True
        elif self.app.warningiserror:
            location = getattr(record, 'location', '')
            try:
                message = record.msg % record.args
            except (TypeError, ValueError):
                message = record.msg  # fall back to the raw message
            if location:
                exc = SphinxWarning(location + ":" + str(message))
            else:
                exc = SphinxWarning(message)
            if record.exc_info is not None:
                # chain the original traceback as the cause
                raise exc from record.exc_info[1]
            else:
                raise exc
        else:
            return True
class DisableWarningIsErrorFilter(logging.Filter):
    """Mark records so WarningIsErrorFilter lets them pass unconditionally."""

    def filter(self, record: logging.LogRecord) -> bool:
        # WarningIsErrorFilter checks this attribute before raising.
        setattr(record, 'skip_warningsiserror', True)
        return True
class MessagePrefixFilter(logging.Filter):
    """Filter that prepends a fixed prefix to every record's message."""

    def __init__(self, prefix: str) -> None:
        self.prefix = prefix
        super().__init__()

    def filter(self, record: logging.LogRecord) -> bool:
        if not self.prefix:
            return True  # empty prefix: leave the message untouched
        record.msg = self.prefix + ' ' + record.msg
        return True
class OnceFilter(logging.Filter):
    """Filter that emits a given (message, args) pair only once; only
    records carrying a truthy ``once`` attribute are deduplicated."""

    def __init__(self, name: str = '') -> None:
        super().__init__(name)
        self.messages = {}  # message -> list of args tuples already emitted

    def filter(self, record: logging.LogRecord) -> bool:
        if not getattr(record, 'once', ''):
            return True  # not marked "once": always pass
        seen_args = self.messages.setdefault(record.msg, [])
        if record.args in seen_args:
            return False  # this exact message+args was already emitted
        seen_args.append(record.args)
        return True
class SphinxLogRecordTranslator(logging.Filter):
    """Convert a plain LogRecord into the Sphinx record flavor chosen by
    subclasses (LogRecordClass) and normalize its ``location`` attribute
    (tuple / docutils node / bare docname -> "path[:line]" string)."""
    LogRecordClass = None  # set by InfoLogRecordTranslator and friends
    def __init__(self, app: "Sphinx") -> None:
        self.app = app
        super().__init__()
    def filter(self, record: SphinxWarningLogRecord) -> bool:
        if isinstance(record, logging.LogRecord):
            # force the subclass so getMessage() handles location/prefix
            record.__class__ = self.LogRecordClass
        location = getattr(record, 'location', None)
        if isinstance(location, tuple):
            docname, lineno = location
            if docname and lineno:
                record.location = '%s:%s' % (self.app.env.doc2path(docname), lineno)
            elif docname:
                record.location = '%s' % self.app.env.doc2path(docname)
            else:
                record.location = None
        elif isinstance(location, nodes.Node):
            record.location = get_node_location(location)
        elif location and ':' not in location:
            # bare docname; strings containing ':' are assumed to already
            # be "file:line" style and are left as-is
            record.location = '%s' % self.app.env.doc2path(location)
        return True
class InfoLogRecordTranslator(SphinxLogRecordTranslator):
    """LogRecordTranslator for INFO level log records."""
    LogRecordClass = SphinxInfoLogRecord
class WarningLogRecordTranslator(SphinxLogRecordTranslator):
    """LogRecordTranslator for WARNING level log records."""
    LogRecordClass = SphinxWarningLogRecord
def get_node_location(node: Node) -> Union[str, None]:
    """Return a ``source:line`` style location string for *node*.

    Falls back to partial forms ("source:" or "<unknown>:line") when only one
    half is known, and returns None when neither is available.

    Fix: the return annotation was ``-> str`` although the function can
    return None; corrected to ``Union[str, None]``.
    """
    (source, line) = get_source_line(node)
    if source and line:
        return "%s:%s" % (source, line)
    elif source:
        return "%s:" % source
    elif line:
        return "<unknown>:%s" % line
    else:
        return None
class ColorizeFormatter(logging.Formatter):
    """Formatter that wraps the formatted message in a console color; an
    explicit ``color`` record attribute wins over the per-level COLOR_MAP."""
    def format(self, record: logging.LogRecord) -> str:
        message = super().format(record)
        color = getattr(record, 'color', None)
        if color is None:
            # .get() bypasses the defaultdict factory: unknown levels stay
            # uncolored
            color = COLOR_MAP.get(record.levelno)
        if color:
            return colorize(color, message)
        else:
            return message
class SafeEncodingWriter:
    """Stream wrapper that degrades gracefully on UnicodeEncodeError."""

    def __init__(self, stream: IO) -> None:
        self.stream = stream
        # streams without an encoding attribute are treated as ascii
        self.encoding = getattr(stream, 'encoding', 'ascii') or 'ascii'

    def write(self, data: str) -> None:
        try:
            self.stream.write(data)
        except UnicodeEncodeError:
            # Replace unencodable characters instead of crashing: encode with
            # 'replace' and decode back so the stream still receives str.
            fallback = data.encode(self.encoding, 'replace').decode(self.encoding)
            self.stream.write(fallback)

    def flush(self) -> None:
        if hasattr(self.stream, 'flush'):
            self.stream.flush()
class LastMessagesWriter:
    """Stream-like writer that records status output on ``app.messagelog``.

    NOTE(review): this class appends every chunk; any bounding of the log
    presumably happens in ``app.messagelog`` itself — confirm its type.
    """
    def __init__(self, app: "Sphinx", stream: IO) -> None:
        # *stream* is accepted for stream-wrapper interface parity but unused
        self.app = app
    def write(self, data: str) -> None:
        self.app.messagelog.append(data)
def setup(app: "Sphinx", status: IO, warning: IO) -> None:
    """Set up the ``sphinx`` root logger: a status-stream handler (records
    below WARNING), a warning-stream handler (WARNING+ with suppression,
    once-filtering and warning-is-error), and an in-memory message log."""
    logger = logging.getLogger(NAMESPACE)
    logger.setLevel(logging.DEBUG)  # per-handler levels do the real filtering
    logger.propagate = False  # keep Sphinx output away from the root logger
    # drop any handlers left over from a previous setup
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)
    info_handler = NewLineStreamHandler(SafeEncodingWriter(status))
    info_handler.addFilter(InfoFilter())
    info_handler.addFilter(InfoLogRecordTranslator(app))
    info_handler.setLevel(VERBOSITY_MAP[app.verbosity])
    info_handler.setFormatter(ColorizeFormatter())
    warning_handler = WarningStreamHandler(SafeEncodingWriter(warning))
    warning_handler.addFilter(WarningSuppressor(app))
    warning_handler.addFilter(WarningLogRecordTranslator(app))
    warning_handler.addFilter(WarningIsErrorFilter(app))
    warning_handler.addFilter(OnceFilter())
    warning_handler.setLevel(logging.WARNING)
    warning_handler.setFormatter(ColorizeFormatter())
    messagelog_handler = logging.StreamHandler(LastMessagesWriter(app, status))
    messagelog_handler.addFilter(InfoFilter())
    messagelog_handler.setLevel(VERBOSITY_MAP[app.verbosity])
    messagelog_handler.setFormatter(ColorizeFormatter())
    logger.addHandler(info_handler)
    logger.addHandler(warning_handler)
    logger.addHandler(messagelog_handler)
| true
| true
|
1c480ee01fd5c048b152028cc1001c86ca680a28
| 994
|
py
|
Python
|
trac/upgrades/db27.py
|
tiagoeckhardt/trac
|
b18c226195bfed8cd19cba97c6f03bd54dbbc044
|
[
"BSD-3-Clause"
] | null | null | null |
trac/upgrades/db27.py
|
tiagoeckhardt/trac
|
b18c226195bfed8cd19cba97c6f03bd54dbbc044
|
[
"BSD-3-Clause"
] | null | null | null |
trac/upgrades/db27.py
|
tiagoeckhardt/trac
|
b18c226195bfed8cd19cba97c6f03bd54dbbc044
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2019 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/.
from trac.db import Table, Column, DatabaseManager
def do_upgrade(env, ver, cursor):
    """Modify the cache table to use an integer id.

    Drops the existing ``cache`` table and recreates it with an integer
    ``id`` primary key plus ``generation`` and ``key`` columns, using the
    environment's database connector to emit dialect-specific DDL.
    """
    # No need to keep the previous content
    cursor.execute("DROP TABLE cache")
    table = Table('cache', key='id')[
        Column('id', type='int'),
        Column('generation', type='int'),
        Column('key'),
    ]
    db_connector, _ = DatabaseManager(env).get_connector()
    for stmt in db_connector.to_sql(table):
        cursor.execute(stmt)
| 34.275862
| 67
| 0.698189
|
from trac.db import Table, Column, DatabaseManager
def do_upgrade(env, ver, cursor):
    """Recreate the ``cache`` table with an integer ``id`` primary key.

    The previous contents are dropped (the cache is recreatable); the new
    schema is emitted via the environment's database connector so the DDL
    matches the backend dialect.
    """
    cursor.execute("DROP TABLE cache")
    table = Table('cache', key='id')[
        Column('id', type='int'),
        Column('generation', type='int'),
        Column('key'),
    ]
    db_connector, _ = DatabaseManager(env).get_connector()
    for stmt in db_connector.to_sql(table):
        cursor.execute(stmt)
| true
| true
|
1c48116e567540872eb35d8210fe6c3f7660b7f8
| 3,913
|
py
|
Python
|
cryptography/hazmat/bindings/utils.py
|
derwolfe/cryptography
|
a6112133d6797313ea8fe741daf25178b2abe25c
|
[
"Apache-2.0"
] | null | null | null |
cryptography/hazmat/bindings/utils.py
|
derwolfe/cryptography
|
a6112133d6797313ea8fe741daf25178b2abe25c
|
[
"Apache-2.0"
] | null | null | null |
cryptography/hazmat/bindings/utils.py
|
derwolfe/cryptography
|
a6112133d6797313ea8fe741daf25178b2abe25c
|
[
"Apache-2.0"
] | 3
|
2017-04-07T12:02:22.000Z
|
2020-03-23T12:11:55.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import binascii
import sys
import cffi
def build_ffi(module_prefix, modules, pre_include="", post_include="",
libraries=[], extra_compile_args=[], extra_link_args=[]):
"""
Modules listed in ``modules`` should have the following attributes:
* ``INCLUDES``: A string containing C includes.
* ``TYPES``: A string containing C declarations for types.
* ``FUNCTIONS``: A string containing C declarations for functions.
* ``MACROS``: A string containing C declarations for any macros.
* ``CUSTOMIZATIONS``: A string containing arbitrary top-level C code, this
can be used to do things like test for a define and provide an
alternate implementation based on that.
* ``CONDITIONAL_NAMES``: A dict mapping strings of condition names from the
library to a list of names which will not be present without the
condition.
"""
ffi = cffi.FFI()
types = []
includes = []
functions = []
macros = []
customizations = []
for name in modules:
module_name = module_prefix + name
__import__(module_name)
module = sys.modules[module_name]
types.append(module.TYPES)
macros.append(module.MACROS)
functions.append(module.FUNCTIONS)
includes.append(module.INCLUDES)
customizations.append(module.CUSTOMIZATIONS)
cdef_sources = types + functions + macros
ffi.cdef("\n".join(cdef_sources))
# We include functions here so that if we got any of their definitions
# wrong, the underlying C compiler will explode. In C you are allowed
# to re-declare a function if it has the same signature. That is:
# int foo(int);
# int foo(int);
# is legal, but the following will fail to compile:
# int foo(int);
# int foo(short);
source = "\n".join(
[pre_include] +
includes +
[post_include] +
functions +
customizations
)
lib = ffi.verify(
source=source,
modulename=_create_modulename(cdef_sources, source, sys.version),
libraries=libraries,
ext_package="cryptography",
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args,
)
for name in modules:
module_name = module_prefix + name
module = sys.modules[module_name]
for condition, names in module.CONDITIONAL_NAMES.items():
if not getattr(lib, condition):
for name in names:
delattr(lib, name)
return ffi, lib
def _create_modulename(cdef_sources, source, sys_version):
"""
cffi creates a modulename internally that incorporates the cffi version.
This will cause cryptography's wheels to break when the version of cffi
the user has does not match what was used when building the wheel. To
resolve this we build our own modulename that uses most of the same code
from cffi but elides the version key.
"""
key = '\x00'.join([sys_version[:3], source] + cdef_sources)
key = key.encode('utf-8')
k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
k2 = k2.lstrip('0').rstrip('L')
return '_Cryptography_cffi_{0}{1}'.format(k1, k2)
| 35.899083
| 79
| 0.669052
|
from __future__ import absolute_import, division, print_function
import binascii
import sys
import cffi
def build_ffi(module_prefix, modules, pre_include="", post_include="",
libraries=[], extra_compile_args=[], extra_link_args=[]):
ffi = cffi.FFI()
types = []
includes = []
functions = []
macros = []
customizations = []
for name in modules:
module_name = module_prefix + name
__import__(module_name)
module = sys.modules[module_name]
types.append(module.TYPES)
macros.append(module.MACROS)
functions.append(module.FUNCTIONS)
includes.append(module.INCLUDES)
customizations.append(module.CUSTOMIZATIONS)
cdef_sources = types + functions + macros
ffi.cdef("\n".join(cdef_sources))
source = "\n".join(
[pre_include] +
includes +
[post_include] +
functions +
customizations
)
lib = ffi.verify(
source=source,
modulename=_create_modulename(cdef_sources, source, sys.version),
libraries=libraries,
ext_package="cryptography",
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args,
)
for name in modules:
module_name = module_prefix + name
module = sys.modules[module_name]
for condition, names in module.CONDITIONAL_NAMES.items():
if not getattr(lib, condition):
for name in names:
delattr(lib, name)
return ffi, lib
def _create_modulename(cdef_sources, source, sys_version):
key = '\x00'.join([sys_version[:3], source] + cdef_sources)
key = key.encode('utf-8')
k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
k1 = k1.lstrip('0x').rstrip('L')
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
k2 = k2.lstrip('0').rstrip('L')
return '_Cryptography_cffi_{0}{1}'.format(k1, k2)
| true
| true
|
1c4812afed716716d770b120fccdcf8bd533a504
| 584
|
py
|
Python
|
aisapi/const.py
|
sviete/AIS-api
|
80694a235b34b99e0ee608e3e3c175732e5be258
|
[
"MIT"
] | 1
|
2018-01-03T11:26:01.000Z
|
2018-01-03T11:26:01.000Z
|
aisapi/const.py
|
sviete/AIS-api
|
80694a235b34b99e0ee608e3e3c175732e5be258
|
[
"MIT"
] | null | null | null |
aisapi/const.py
|
sviete/AIS-api
|
80694a235b34b99e0ee608e3e3c175732e5be258
|
[
"MIT"
] | 2
|
2018-08-14T04:30:20.000Z
|
2018-08-14T04:31:16.000Z
|
"""Constants."""
AIS_WS_TUNE_IN_URL = "http://opml.radiotime.com/"
AIS_WS_AUDIO_TYPE_URL = (
"https://powiedz.co/ords/dom/dom/audio_type?nature={audio_nature}"
)
AIS_WS_AUDIO_NAME_URL = (
"https://powiedz.co/ords/dom/dom/audio_name?nature={audio_nature}&type={audio_type}"
)
AIS_WS_AUDIOBOOKS_URL = "https://wolnelektury.pl/api/audiobooks/?format=json"
AIS_WS_AUDIO_INFO = "https://powiedz.co/ords/dom/dom/get_audio_full_info"
AIS_WS_COMMAND_URL = "{ais_url}/command"
AIS_WS_AUDIO_STATUS_URL = "{ais_url}/audio_status"
AIS_WS_TTS_URL = "{ais_url}/text_to_speech?text={text}"
| 38.933333
| 88
| 0.765411
|
AIS_WS_TUNE_IN_URL = "http://opml.radiotime.com/"
AIS_WS_AUDIO_TYPE_URL = (
"https://powiedz.co/ords/dom/dom/audio_type?nature={audio_nature}"
)
AIS_WS_AUDIO_NAME_URL = (
"https://powiedz.co/ords/dom/dom/audio_name?nature={audio_nature}&type={audio_type}"
)
AIS_WS_AUDIOBOOKS_URL = "https://wolnelektury.pl/api/audiobooks/?format=json"
AIS_WS_AUDIO_INFO = "https://powiedz.co/ords/dom/dom/get_audio_full_info"
AIS_WS_COMMAND_URL = "{ais_url}/command"
AIS_WS_AUDIO_STATUS_URL = "{ais_url}/audio_status"
AIS_WS_TTS_URL = "{ais_url}/text_to_speech?text={text}"
| true
| true
|
1c4812d20adc26bbc3dee861735190b37f283c00
| 3,500
|
py
|
Python
|
fake_gen/factories/datetimes.py
|
psafont/fake-gen
|
a3d74fdb54c3d4171ce2ba6ab0ad15791cf5b7e5
|
[
"MIT"
] | 1
|
2020-04-14T09:34:58.000Z
|
2020-04-14T09:34:58.000Z
|
fake_gen/factories/datetimes.py
|
psafont/fake-gen
|
a3d74fdb54c3d4171ce2ba6ab0ad15791cf5b7e5
|
[
"MIT"
] | 1
|
2018-12-04T10:02:57.000Z
|
2018-12-04T10:02:57.000Z
|
fake_gen/factories/datetimes.py
|
psafont/fake-gen
|
a3d74fdb54c3d4171ce2ba6ab0ad15791cf5b7e5
|
[
"MIT"
] | null | null | null |
import random
import datetime
from fake_gen.errors import InvalidFieldType
from fake_gen.base import Factory, DependentField
class RandomDateFactory(Factory):
"""
Generates a random dates between 2 dates.
:type minimum: datetime.datetime
:type maximum: datetime.datetime
Example:
>>> start = datetime.datetime(2013, 10, 1, 1, 1, 0, 0)
>>> end = datetime.datetime(2013, 10, 1, 1, 1, 0, 1)
>>> dates = list(RandomDateFactory(start, end).generate(100))
>>> len(dates)
100
>>> datetime.datetime(2013, 10, 1, 1, 1, 0, 0) in dates
True
>>> datetime.datetime(2013, 10, 1, 1, 1, 0, 1) in dates
True
>>> datetime.datetime(2013, 10, 1, 1, 1, 0, 2) in dates
False
>>> datetime.datetime(2013, 10, 1, 2, 1, 0, 2) in dates
False
"""
def __init__(self, minimum, maximum):
super(RandomDateFactory, self).__init__()
self._maximum = maximum
self._minimum = minimum
delta = maximum - minimum
self._delta_seconds = delta.total_seconds()
self._sign = -1 if self._delta_seconds < 0 else 1
self._delta_seconds *= self._sign
def __call__(self):
delta = datetime.timedelta(seconds=(random.random() * self._delta_seconds))
return self._minimum + (self._sign * delta)
class DateIntervalFactory(Factory):
"""
Generates datetime objects starting from `base` which each iteration adding `delta` to it.
:type base: datetime.datetime
:type delta: datetime.timedelta
Example:
>>> start = datetime.datetime(2013, 10, 1)
>>> interval = datetime.timedelta(days=1)
>>> datetimes = list(DateIntervalFactory(start, interval).generate(3))
>>> len(datetimes)
3
>>> datetimes
[datetime.datetime(2013, 10, 1, 0, 0), ..., datetime.datetime(2013, 10, 3, 0, 0)]
"""
def __init__(self, base, delta):
super(DateIntervalFactory, self).__init__()
self._base = base
self._delta = delta
def __call__(self):
return self._base + self.current_index * self._delta
class RelativeToDatetimeField(DependentField):
"""
Adds a datetime.timedelta to a datetime value from an dependent field.
"""
def __init__(self, datetime_field_name, delta):
super(RelativeToDatetimeField, self).__init__([datetime_field_name])
self._datetime_field_name = datetime_field_name
self._delta = delta
def __call__(self):
other_field = self.depending_fields[self._datetime_field_name]
if not isinstance(other_field, datetime.datetime):
raise InvalidFieldType("field {} isn't of type datetime.datetime")
return other_field + self._delta
class AlignedRelativeDatetimeField(DependentField):
"""
Returns another datetime field, only aligned to specific time quantums.
"""
def __init__(self, other_dateime_field, minute_alignment):
if minute_alignment <= 0 or minute_alignment > 60:
raise ValueError("minute_alignment needs to be a positive integer between 1 - 60")
super(AlignedRelativeDatetimeField, self).__init__([other_dateime_field])
self._other_datetime_field = other_dateime_field
self._minute_alignment = minute_alignment
def __call__(self):
super(AlignedRelativeDatetimeField, self).__call__()
other_value = self.depending_fields[self._other_datetime_field]
return other_value - datetime.timedelta(minutes=other_value.minute % self._minute_alignment)
| 37.634409
| 100
| 0.676857
|
import random
import datetime
from fake_gen.errors import InvalidFieldType
from fake_gen.base import Factory, DependentField
class RandomDateFactory(Factory):
def __init__(self, minimum, maximum):
super(RandomDateFactory, self).__init__()
self._maximum = maximum
self._minimum = minimum
delta = maximum - minimum
self._delta_seconds = delta.total_seconds()
self._sign = -1 if self._delta_seconds < 0 else 1
self._delta_seconds *= self._sign
def __call__(self):
delta = datetime.timedelta(seconds=(random.random() * self._delta_seconds))
return self._minimum + (self._sign * delta)
class DateIntervalFactory(Factory):
def __init__(self, base, delta):
super(DateIntervalFactory, self).__init__()
self._base = base
self._delta = delta
def __call__(self):
return self._base + self.current_index * self._delta
class RelativeToDatetimeField(DependentField):
def __init__(self, datetime_field_name, delta):
super(RelativeToDatetimeField, self).__init__([datetime_field_name])
self._datetime_field_name = datetime_field_name
self._delta = delta
def __call__(self):
other_field = self.depending_fields[self._datetime_field_name]
if not isinstance(other_field, datetime.datetime):
raise InvalidFieldType("field {} isn't of type datetime.datetime")
return other_field + self._delta
class AlignedRelativeDatetimeField(DependentField):
def __init__(self, other_dateime_field, minute_alignment):
if minute_alignment <= 0 or minute_alignment > 60:
raise ValueError("minute_alignment needs to be a positive integer between 1 - 60")
super(AlignedRelativeDatetimeField, self).__init__([other_dateime_field])
self._other_datetime_field = other_dateime_field
self._minute_alignment = minute_alignment
def __call__(self):
super(AlignedRelativeDatetimeField, self).__call__()
other_value = self.depending_fields[self._other_datetime_field]
return other_value - datetime.timedelta(minutes=other_value.minute % self._minute_alignment)
| true
| true
|
1c481379434ada17f2a9088c56489beafbf8d172
| 4,503
|
py
|
Python
|
reconcile/ecr_mirror.py
|
bhushanthakur93/qontract-reconcile
|
fd8eea9f92d353224113955d08e3592864e37df8
|
[
"Apache-2.0"
] | null | null | null |
reconcile/ecr_mirror.py
|
bhushanthakur93/qontract-reconcile
|
fd8eea9f92d353224113955d08e3592864e37df8
|
[
"Apache-2.0"
] | null | null | null |
reconcile/ecr_mirror.py
|
bhushanthakur93/qontract-reconcile
|
fd8eea9f92d353224113955d08e3592864e37df8
|
[
"Apache-2.0"
] | null | null | null |
import base64
import logging
from sretoolbox.container import Image
from sretoolbox.container import Skopeo
from sretoolbox.container.skopeo import SkopeoCmdError
from sretoolbox.utils import threaded
from reconcile import queries
from reconcile.utils.aws_api import AWSApi
from reconcile.utils.secret_reader import SecretReader
QONTRACT_INTEGRATION = "ecr-mirror"
LOG = logging.getLogger(__name__)
class EcrMirror:
def __init__(self, instance, dry_run):
self.dry_run = dry_run
self.instance = instance
self.settings = queries.get_app_interface_settings()
self.secret_reader = SecretReader(settings=self.settings)
self.skopeo_cli = Skopeo(dry_run)
self.error = False
identifier = instance["identifier"]
account = instance["account"]
region = instance.get("region")
self.aws_cli = AWSApi(
thread_pool_size=1,
accounts=[self._get_aws_account_info(account)],
settings=self.settings,
init_ecr_auth_tokens=True,
)
self.aws_cli.map_ecr_resources()
self.ecr_uri = self._get_image_uri(
account=account,
repository=identifier,
)
if self.ecr_uri is None:
self.error = True
LOG.error(f"Could not find the ECR repository {identifier}")
self.ecr_username, self.ecr_password = self._get_ecr_creds(
account=account,
region=region,
)
self.ecr_auth = f"{self.ecr_username}:{self.ecr_password}"
self.image_username = None
self.image_password = None
self.image_auth = None
pull_secret = self.instance["mirror"]["pullCredentials"]
if pull_secret is not None:
raw_data = self.secret_reader.read_all(pull_secret)
self.image_username = raw_data["user"]
self.image_password = raw_data["token"]
self.image_auth = f"{self.image_username}:{self.image_password}"
def run(self):
if self.error:
return
ecr_mirror = Image(
self.ecr_uri, username=self.ecr_username, password=self.ecr_password
)
image = Image(
self.instance["mirror"]["url"],
username=self.image_username,
password=self.image_password,
)
LOG.debug("[checking %s -> %s]", image, ecr_mirror)
for tag in image:
if tag not in ecr_mirror:
try:
self.skopeo_cli.copy(
src_image=image[tag],
src_creds=self.image_auth,
dst_image=ecr_mirror[tag],
dest_creds=self.ecr_auth,
)
except SkopeoCmdError as details:
LOG.error("[%s]", details)
def _get_ecr_creds(self, account, region):
if region is None:
region = self.aws_cli.accounts[account]["resourcesDefaultRegion"]
auth_token = f"{account}/{region}"
data = self.aws_cli.auth_tokens[auth_token]
auth_data = data["authorizationData"][0]
token = auth_data["authorizationToken"]
password = base64.b64decode(token).decode("utf-8").split(":")[1]
return "AWS", password
def _get_image_uri(self, account, repository):
for repo in self.aws_cli.resources[account]["ecr"]:
if repo["repositoryName"] == repository:
return repo["repositoryUri"]
@staticmethod
def _get_aws_account_info(account):
for account_info in queries.get_aws_accounts():
if "name" not in account_info:
continue
if account_info["name"] != account:
continue
return account_info
def worker(ecr_mirror_instance):
return ecr_mirror_instance.run()
def run(dry_run, thread_pool_size=10):
namespaces = queries.get_namespaces()
tfrs_to_mirror = []
for namespace in namespaces:
if namespace["terraformResources"] is None:
continue
for tfr in namespace["terraformResources"]:
if tfr["provider"] != "ecr":
continue
if tfr["mirror"] is None:
continue
tfrs_to_mirror.append(tfr)
work_list = threaded.run(
EcrMirror, tfrs_to_mirror, thread_pool_size=thread_pool_size, dry_run=dry_run
)
threaded.run(worker, work_list, thread_pool_size=thread_pool_size)
| 31.711268
| 85
| 0.613147
|
import base64
import logging
from sretoolbox.container import Image
from sretoolbox.container import Skopeo
from sretoolbox.container.skopeo import SkopeoCmdError
from sretoolbox.utils import threaded
from reconcile import queries
from reconcile.utils.aws_api import AWSApi
from reconcile.utils.secret_reader import SecretReader
QONTRACT_INTEGRATION = "ecr-mirror"
LOG = logging.getLogger(__name__)
class EcrMirror:
def __init__(self, instance, dry_run):
self.dry_run = dry_run
self.instance = instance
self.settings = queries.get_app_interface_settings()
self.secret_reader = SecretReader(settings=self.settings)
self.skopeo_cli = Skopeo(dry_run)
self.error = False
identifier = instance["identifier"]
account = instance["account"]
region = instance.get("region")
self.aws_cli = AWSApi(
thread_pool_size=1,
accounts=[self._get_aws_account_info(account)],
settings=self.settings,
init_ecr_auth_tokens=True,
)
self.aws_cli.map_ecr_resources()
self.ecr_uri = self._get_image_uri(
account=account,
repository=identifier,
)
if self.ecr_uri is None:
self.error = True
LOG.error(f"Could not find the ECR repository {identifier}")
self.ecr_username, self.ecr_password = self._get_ecr_creds(
account=account,
region=region,
)
self.ecr_auth = f"{self.ecr_username}:{self.ecr_password}"
self.image_username = None
self.image_password = None
self.image_auth = None
pull_secret = self.instance["mirror"]["pullCredentials"]
if pull_secret is not None:
raw_data = self.secret_reader.read_all(pull_secret)
self.image_username = raw_data["user"]
self.image_password = raw_data["token"]
self.image_auth = f"{self.image_username}:{self.image_password}"
def run(self):
if self.error:
return
ecr_mirror = Image(
self.ecr_uri, username=self.ecr_username, password=self.ecr_password
)
image = Image(
self.instance["mirror"]["url"],
username=self.image_username,
password=self.image_password,
)
LOG.debug("[checking %s -> %s]", image, ecr_mirror)
for tag in image:
if tag not in ecr_mirror:
try:
self.skopeo_cli.copy(
src_image=image[tag],
src_creds=self.image_auth,
dst_image=ecr_mirror[tag],
dest_creds=self.ecr_auth,
)
except SkopeoCmdError as details:
LOG.error("[%s]", details)
def _get_ecr_creds(self, account, region):
if region is None:
region = self.aws_cli.accounts[account]["resourcesDefaultRegion"]
auth_token = f"{account}/{region}"
data = self.aws_cli.auth_tokens[auth_token]
auth_data = data["authorizationData"][0]
token = auth_data["authorizationToken"]
password = base64.b64decode(token).decode("utf-8").split(":")[1]
return "AWS", password
def _get_image_uri(self, account, repository):
for repo in self.aws_cli.resources[account]["ecr"]:
if repo["repositoryName"] == repository:
return repo["repositoryUri"]
@staticmethod
def _get_aws_account_info(account):
for account_info in queries.get_aws_accounts():
if "name" not in account_info:
continue
if account_info["name"] != account:
continue
return account_info
def worker(ecr_mirror_instance):
return ecr_mirror_instance.run()
def run(dry_run, thread_pool_size=10):
namespaces = queries.get_namespaces()
tfrs_to_mirror = []
for namespace in namespaces:
if namespace["terraformResources"] is None:
continue
for tfr in namespace["terraformResources"]:
if tfr["provider"] != "ecr":
continue
if tfr["mirror"] is None:
continue
tfrs_to_mirror.append(tfr)
work_list = threaded.run(
EcrMirror, tfrs_to_mirror, thread_pool_size=thread_pool_size, dry_run=dry_run
)
threaded.run(worker, work_list, thread_pool_size=thread_pool_size)
| true
| true
|
1c4814da9270cbf4960b91df34b13e65e8cb7550
| 2,195
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_state_guest_services_delete.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_state_guest_services_delete.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/instance_fsm/_instance_state_guest_services_delete.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_common import debug
from nfv_common import state_machine
from nfv_vim.instance_fsm._instance_defs import INSTANCE_EVENT
from nfv_vim.instance_fsm._instance_defs import INSTANCE_STATE
from nfv_vim.instance_fsm._instance_tasks import GuestServicesDeleteTask
DLOG = debug.debug_get_logger('nfv_vim.state_machine.instance')
class GuestServicesDeleteState(state_machine.State):
"""
Instance - GuestServicesDelete State
"""
def __init__(self, name):
super(GuestServicesDeleteState, self).__init__(name)
def enter(self, instance):
"""
Entering GuestServicesDelete state
"""
DLOG.info("Entering state (%s) for %s." % (self.name, instance.name))
instance.task = GuestServicesDeleteTask(instance)
instance.task.start()
def exit(self, instance):
"""
Exiting GuestServicesDelete state
"""
DLOG.info("Exiting state (%s) for %s." % (self.name, instance.name))
if isinstance(instance.task, GuestServicesDeleteTask):
instance.task.abort()
def transition(self, instance, event, event_data, to_state):
"""
Transition from the GuestServicesDelete state
"""
pass
def handle_event(self, instance, event, event_data=None):
"""
Handle event while in the GuestServicesDelete state
"""
if INSTANCE_EVENT.TASK_STOP == event:
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_COMPLETED == event:
DLOG.debug("GuestServicesDelete completed for %s." % instance.name)
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_FAILED == event:
DLOG.info("GuestServicesDelete failed for %s." % instance.name)
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_TIMEOUT == event:
DLOG.info("GuestServicesDelete timed out for %s." % instance.name)
return INSTANCE_STATE.INITIAL
else:
DLOG.verbose("Ignoring %s event for %s." % (event, instance.name))
return self.name
| 32.279412
| 79
| 0.666515
|
from nfv_common import debug
from nfv_common import state_machine
from nfv_vim.instance_fsm._instance_defs import INSTANCE_EVENT
from nfv_vim.instance_fsm._instance_defs import INSTANCE_STATE
from nfv_vim.instance_fsm._instance_tasks import GuestServicesDeleteTask
DLOG = debug.debug_get_logger('nfv_vim.state_machine.instance')
class GuestServicesDeleteState(state_machine.State):
def __init__(self, name):
super(GuestServicesDeleteState, self).__init__(name)
def enter(self, instance):
DLOG.info("Entering state (%s) for %s." % (self.name, instance.name))
instance.task = GuestServicesDeleteTask(instance)
instance.task.start()
def exit(self, instance):
DLOG.info("Exiting state (%s) for %s." % (self.name, instance.name))
if isinstance(instance.task, GuestServicesDeleteTask):
instance.task.abort()
def transition(self, instance, event, event_data, to_state):
pass
def handle_event(self, instance, event, event_data=None):
if INSTANCE_EVENT.TASK_STOP == event:
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_COMPLETED == event:
DLOG.debug("GuestServicesDelete completed for %s." % instance.name)
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_FAILED == event:
DLOG.info("GuestServicesDelete failed for %s." % instance.name)
return INSTANCE_STATE.INITIAL
elif INSTANCE_EVENT.TASK_TIMEOUT == event:
DLOG.info("GuestServicesDelete timed out for %s." % instance.name)
return INSTANCE_STATE.INITIAL
else:
DLOG.verbose("Ignoring %s event for %s." % (event, instance.name))
return self.name
| true
| true
|
1c481554ae98a5e2b009109d0fcec1bafdec2aec
| 4,338
|
py
|
Python
|
pc/userInput.py
|
martinloland/rov
|
542ca17daeb17109ac686f979ed3bb1dfb64b846
|
[
"MIT"
] | null | null | null |
pc/userInput.py
|
martinloland/rov
|
542ca17daeb17109ac686f979ed3bb1dfb64b846
|
[
"MIT"
] | null | null | null |
pc/userInput.py
|
martinloland/rov
|
542ca17daeb17109ac686f979ed3bb1dfb64b846
|
[
"MIT"
] | null | null | null |
'''
userInput.py
- Read input from keyboard and USB-controller
- Initiate functions to make the change
'''
def getUserInput():
buttons = []
if joystickConnected:
buttons = getJoystick(buttons)
# Toggle keys
for event in GAME_EVENTS.get():
if event.type == GAME_GLOBALS.QUIT:
buttons.append('quit')
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
buttons.append('return')
# close
if event.key == pygame.K_ESCAPE:
buttons.append('quit')
# LED
if event.key == pygame.K_l:
buttons.append('led')
# info
if event.key == pygame.K_i:
buttons.append('info')
# info
if event.key == pygame.K_p:
buttons.append('motorin')
# overlay
if event.key == pygame.K_o:
buttons.append('overlay')
# reset gimbal
if event.key == pygame.K_q:
buttons.append('resetGimbal')
# snapshot
if event.key == pygame.K_SPACE:
buttons.append('snapshot')
# snapshot
if event.key == pygame.K_j:
buttons.append('joystick')
# Gimbal
keys = pygame.key.get_pressed()
if keys[pygame.K_a]:
buttons.append('gLeft')
if keys[pygame.K_d]:
buttons.append('gRight')
if keys[pygame.K_s]:
buttons.append('gUp')
if keys[pygame.K_w]:
buttons.append('gDown')
if keys[pygame.K_q]:
buttons.append('gReset')
# Motors
if keys[pygame.K_UP]:
buttons.append('mForward')
if keys[pygame.K_DOWN]:
buttons.append('mBack')
if keys[pygame.K_LEFT]:
buttons.append('mLeft')
if keys[pygame.K_RIGHT]:
buttons.append('mRight')
if keys[pygame.K_z]:
buttons.append('mDecrease')
if keys[pygame.K_c]:
buttons.append('mIncrease')
if keys[pygame.K_r]:
buttons.append('mUp')
if keys[pygame.K_f]:
buttons.append('mDown')
actOnInput(buttons)
def getJoystick(buttons):
global lastout
'''
0:L left/right
1:up/down
2: R left/right
3: up/down
4: 1 increase
5: 2 motors
6: 3 decrease
7: 4 overlay
8: L1 snapshot
9: R1 up
11: R2 down
12: select info
13: start exit
14: L3resetGimbal
15: R3 led
'''
out = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
it = 0 #iterator
# Read axis
for i in range(0, j.get_numaxes()):
out[it] = j.get_axis(i)
it+=1
#Read input from buttons
for i in range(0, j.get_numbuttons()):
out[it] = j.get_button(i)
it+=1
# HOLD KEYS
# gimbal
if out[0]>0.8:
buttons.append('gRight')
if out[0]<-0.8:
buttons.append('gLeft')
if out[1]>0.8:
buttons.append('gUp')
if out[1]<-0.8:
buttons.append('gDown')
#motor
if out[2]>0.8:
buttons.append('mRight')
if out[2]<-0.8:
buttons.append('mLeft')
if out[3]>0.8:
buttons.append('mBack')
if out[3]<-0.8:
buttons.append('mForward')
if out[4]:
buttons.append('mIncrease')
if out[6]:
buttons.append('mDecrease')
if out[9]:
buttons.append('mUp')
if out[11]:
buttons.append('mDown')
# TOGGLE KEYS
if out[12] and out[12] != lastout[12]:
buttons.append('info')
if out[13] and out[13] != lastout[13]:
buttons.append('quit')
if out[14] and out[14] != lastout[14]:
buttons.append('resetGimbal')
if out[15] and out[15] != lastout[15]:
buttons.append('led')
if out[5] and out[5] != lastout[5]:
buttons.append('motorin')
if out[7] and out[7] != lastout[7]:
buttons.append('overlay')
if out[8] and out[8] != lastout[8]:
buttons.append('snapshot')
lastout = out
return buttons
def actOnInput(buttons):
if any("return" in s for s in buttons):
toggle_fullscreen()
if any("quit" in s for s in buttons):
closeProgram()
if any("snapshot" in s for s in buttons):
snapshot()
if any("overlay" in s for s in buttons):
if ui.overlay:
ui.overlay = False
elif not ui.overlay:
ui.overlay = True
if any("motorin" in s for s in buttons):
if ui.motorInfo:
ui.motorInfo = False
elif not ui.motorInfo:
ui.motorInfo = True
if any("info" in s for s in buttons):
if ui.info:
ui.info = False
elif not ui.info:
ui.info = True
if any("led" in s for s in buttons):
if act.led:
act.led = 0
elif not act.led:
act.led = 1
if any("joystick" in s for s in buttons):
if ui.joystick:
ui.joystick = False
elif not ui.joystick:
ui.joystick = True
gimbal(buttons)
motor(buttons)
| 22.59375
| 46
| 0.620793
|
def getUserInput():
buttons = []
if joystickConnected:
buttons = getJoystick(buttons)
for event in GAME_EVENTS.get():
if event.type == GAME_GLOBALS.QUIT:
buttons.append('quit')
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
buttons.append('return')
if event.key == pygame.K_ESCAPE:
buttons.append('quit')
if event.key == pygame.K_l:
buttons.append('led')
if event.key == pygame.K_i:
buttons.append('info')
if event.key == pygame.K_p:
buttons.append('motorin')
if event.key == pygame.K_o:
buttons.append('overlay')
if event.key == pygame.K_q:
buttons.append('resetGimbal')
if event.key == pygame.K_SPACE:
buttons.append('snapshot')
if event.key == pygame.K_j:
buttons.append('joystick')
keys = pygame.key.get_pressed()
if keys[pygame.K_a]:
buttons.append('gLeft')
if keys[pygame.K_d]:
buttons.append('gRight')
if keys[pygame.K_s]:
buttons.append('gUp')
if keys[pygame.K_w]:
buttons.append('gDown')
if keys[pygame.K_q]:
buttons.append('gReset')
if keys[pygame.K_UP]:
buttons.append('mForward')
if keys[pygame.K_DOWN]:
buttons.append('mBack')
if keys[pygame.K_LEFT]:
buttons.append('mLeft')
if keys[pygame.K_RIGHT]:
buttons.append('mRight')
if keys[pygame.K_z]:
buttons.append('mDecrease')
if keys[pygame.K_c]:
buttons.append('mIncrease')
if keys[pygame.K_r]:
buttons.append('mUp')
if keys[pygame.K_f]:
buttons.append('mDown')
actOnInput(buttons)
def getJoystick(buttons):
global lastout
out = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
it = 0
for i in range(0, j.get_numaxes()):
out[it] = j.get_axis(i)
it+=1
for i in range(0, j.get_numbuttons()):
out[it] = j.get_button(i)
it+=1
if out[0]>0.8:
buttons.append('gRight')
if out[0]<-0.8:
buttons.append('gLeft')
if out[1]>0.8:
buttons.append('gUp')
if out[1]<-0.8:
buttons.append('gDown')
if out[2]>0.8:
buttons.append('mRight')
if out[2]<-0.8:
buttons.append('mLeft')
if out[3]>0.8:
buttons.append('mBack')
if out[3]<-0.8:
buttons.append('mForward')
if out[4]:
buttons.append('mIncrease')
if out[6]:
buttons.append('mDecrease')
if out[9]:
buttons.append('mUp')
if out[11]:
buttons.append('mDown')
if out[12] and out[12] != lastout[12]:
buttons.append('info')
if out[13] and out[13] != lastout[13]:
buttons.append('quit')
if out[14] and out[14] != lastout[14]:
buttons.append('resetGimbal')
if out[15] and out[15] != lastout[15]:
buttons.append('led')
if out[5] and out[5] != lastout[5]:
buttons.append('motorin')
if out[7] and out[7] != lastout[7]:
buttons.append('overlay')
if out[8] and out[8] != lastout[8]:
buttons.append('snapshot')
lastout = out
return buttons
def actOnInput(buttons):
if any("return" in s for s in buttons):
toggle_fullscreen()
if any("quit" in s for s in buttons):
closeProgram()
if any("snapshot" in s for s in buttons):
snapshot()
if any("overlay" in s for s in buttons):
if ui.overlay:
ui.overlay = False
elif not ui.overlay:
ui.overlay = True
if any("motorin" in s for s in buttons):
if ui.motorInfo:
ui.motorInfo = False
elif not ui.motorInfo:
ui.motorInfo = True
if any("info" in s for s in buttons):
if ui.info:
ui.info = False
elif not ui.info:
ui.info = True
if any("led" in s for s in buttons):
if act.led:
act.led = 0
elif not act.led:
act.led = 1
if any("joystick" in s for s in buttons):
if ui.joystick:
ui.joystick = False
elif not ui.joystick:
ui.joystick = True
gimbal(buttons)
motor(buttons)
| true
| true
|
1c4815d09dda9708289263b84fc70dfe99fb9044
| 4,006
|
py
|
Python
|
src/py/flwr/server/grpc_server/flower_service_servicer.py
|
Chris-george-anil/flower
|
98fb2fcde273c1226cc1f2e1638c1e4d8f35815c
|
[
"Apache-2.0"
] | 895
|
2020-03-22T20:34:16.000Z
|
2022-03-31T15:20:42.000Z
|
src/py/flwr/server/grpc_server/flower_service_servicer.py
|
Chris-george-anil/flower
|
98fb2fcde273c1226cc1f2e1638c1e4d8f35815c
|
[
"Apache-2.0"
] | 322
|
2020-02-19T10:16:33.000Z
|
2022-03-31T09:49:08.000Z
|
src/py/flwr/server/grpc_server/flower_service_servicer.py
|
Chris-george-anil/flower
|
98fb2fcde273c1226cc1f2e1638c1e4d8f35815c
|
[
"Apache-2.0"
] | 234
|
2020-03-31T10:52:16.000Z
|
2022-03-31T14:04:42.000Z
|
# Copyright 2020 Adap GmbH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Servicer for FlowerService.
Relevant knowledge for reading this modules code:
- https://github.com/grpc/grpc/blob/master/doc/statuscodes.md
"""
from typing import Callable, Iterator
import grpc
from flwr.proto import transport_pb2_grpc
from flwr.proto.transport_pb2 import ClientMessage, ServerMessage
from flwr.server.client_manager import ClientManager
from flwr.server.grpc_server.grpc_bridge import GRPCBridge
from flwr.server.grpc_server.grpc_client_proxy import GrpcClientProxy
def default_bridge_factory() -> GRPCBridge:
    """Build the default GRPCBridge connecting server and client proxy."""
    bridge = GRPCBridge()
    return bridge
def default_grpc_client_factory(cid: str, bridge: GRPCBridge) -> GrpcClientProxy:
    """Wrap *cid* and *bridge* in the default GrpcClientProxy implementation."""
    proxy = GrpcClientProxy(cid=cid, bridge=bridge)
    return proxy
def register_client(
    client_manager: ClientManager,
    client: GrpcClientProxy,
    context: grpc.ServicerContext,
) -> bool:
    """Register *client* with *client_manager* and return success.

    On successful registration an RPC-termination callback is installed on
    *context* so the bridge is closed and the client unregistered as soon as
    the gRPC stream ends (for any reason).
    """
    registered = client_manager.register(client)
    if registered:

        def on_rpc_done() -> None:
            # Clean up bridge and registration when the stream terminates.
            client.bridge.close()
            client_manager.unregister(client)

        context.add_callback(on_rpc_done)
    return registered
class FlowerServiceServicer(transport_pb2_grpc.FlowerServiceServicer):
    """FlowerServiceServicer handling the bi-directional gRPC message stream."""

    def __init__(
        self,
        client_manager: ClientManager,
        grpc_bridge_factory: Callable[[], GRPCBridge] = default_bridge_factory,
        grpc_client_factory: Callable[
            [str, GRPCBridge], GrpcClientProxy
        ] = default_grpc_client_factory,
    ) -> None:
        self.client_manager: ClientManager = client_manager
        self.grpc_bridge_factory = grpc_bridge_factory
        self.client_factory = grpc_client_factory

    def Join(  # pylint: disable=invalid-name
        self,
        request_iterator: Iterator[ClientMessage],
        context: grpc.ServicerContext,
    ) -> Iterator[ServerMessage]:
        """Serve one connected client for the lifetime of its stream.

        Protocol:
        - The first message is sent from the server to the client
        - Both ServerMessage and ClientMessage are wrappers around the
          actual payload message
        - Join itself is (pretty much) protocol unaware
        """
        peer = context.peer()
        bridge = self.grpc_bridge_factory()
        client = self.client_factory(peer, bridge)
        # Guard clause: if registration fails, close the stream immediately.
        if not register_client(self.client_manager, client, context):
            return
        server_messages = bridge.server_message_iterator()
        client_messages = request_iterator
        while True:
            try:
                # Server speaks first: forward its message to the client...
                yield next(server_messages)
                # ...then wait for the reply and hand it to the bridge.
                bridge.set_client_message(next(client_messages))
            except StopIteration:
                return
| 36.418182
| 81
| 0.678233
|
from typing import Callable, Iterator
import grpc
from flwr.proto import transport_pb2_grpc
from flwr.proto.transport_pb2 import ClientMessage, ServerMessage
from flwr.server.client_manager import ClientManager
from flwr.server.grpc_server.grpc_bridge import GRPCBridge
from flwr.server.grpc_server.grpc_client_proxy import GrpcClientProxy
def default_bridge_factory() -> GRPCBridge:
return GRPCBridge()
def default_grpc_client_factory(cid: str, bridge: GRPCBridge) -> GrpcClientProxy:
return GrpcClientProxy(cid=cid, bridge=bridge)
def register_client(
client_manager: ClientManager,
client: GrpcClientProxy,
context: grpc.ServicerContext,
) -> bool:
is_success = client_manager.register(client)
if is_success:
def rpc_termination_callback() -> None:
client.bridge.close()
client_manager.unregister(client)
context.add_callback(rpc_termination_callback)
return is_success
class FlowerServiceServicer(transport_pb2_grpc.FlowerServiceServicer):
def __init__(
self,
client_manager: ClientManager,
grpc_bridge_factory: Callable[[], GRPCBridge] = default_bridge_factory,
grpc_client_factory: Callable[
[str, GRPCBridge], GrpcClientProxy
] = default_grpc_client_factory,
) -> None:
self.client_manager: ClientManager = client_manager
self.grpc_bridge_factory = grpc_bridge_factory
self.client_factory = grpc_client_factory
def Join(
self,
request_iterator: Iterator[ClientMessage],
context: grpc.ServicerContext,
) -> Iterator[ServerMessage]:
peer = context.peer()
bridge = self.grpc_bridge_factory()
client = self.client_factory(peer, bridge)
is_success = register_client(self.client_manager, client, context)
if is_success:
client_message_iterator = request_iterator
server_message_iterator = bridge.server_message_iterator()
while True:
try:
server_message = next(server_message_iterator)
yield server_message
client_message = next(client_message_iterator)
bridge.set_client_message(client_message)
except StopIteration:
break
| true
| true
|
1c48168926e014644a7673b353146114eacaca51
| 808
|
py
|
Python
|
multiprocessing/test_pool_async.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | 126
|
2019-07-19T14:42:41.000Z
|
2022-03-21T22:22:19.000Z
|
multiprocessing/test_pool_async.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | 38
|
2019-08-28T01:46:31.000Z
|
2022-03-17T05:46:51.000Z
|
multiprocessing/test_pool_async.py
|
Carglglz/micropython-lib
|
07102c56aa1087b97ee313cedc1d89fd20452e11
|
[
"PSF-2.0"
] | 55
|
2019-08-02T09:32:33.000Z
|
2021-12-22T11:25:51.000Z
|
# Smoke test for multiprocessing.Pool.apply_async: checks results, ready()
# polling timing, and that several async tasks actually run in parallel.
import time
from multiprocessing import Pool

def f(x):
    """Return the square of x (fast worker)."""
    return x*x

pool = Pool(4)
future = pool.apply_async(f, (10,))
assert future.get() == 100

def f2(x):
    """Return x + 1 after a 0.5 s delay (slow worker for timing checks)."""
    time.sleep(0.5)
    return x + 1

future = pool.apply_async(f2, (10,))
iter = 0
while not future.ready():
    # Poll at 100 ms intervals until the 0.5 s task completes.
    #print("not ready")
    time.sleep(0.1)
    iter += 1
assert future.get() == 11
# A 0.5 s task polled every 0.1 s should need roughly 5 polls; allow slack
# up to 8 for scheduling jitter.
assert iter >= 5 and iter <= 8

t = time.time()
futs = [
    pool.apply_async(f2, (10,)),
    pool.apply_async(f2, (11,)),
    pool.apply_async(f2, (12,)),
]
iter = 0
while True:
    # Manual equivalent of: while not all(fut.ready() for fut in futs)
    #not all(futs):
    c = 0
    for f in futs:  # NOTE: rebinds the module-level name `f` defined above
        if not f.ready():
            c += 1
    if not c:
        break
    #print("not ready2")
    time.sleep(0.1)
    iter += 1
# Three 0.5 s tasks on a 4-worker pool run concurrently, so total wait is
# still ~0.5 s (same 5-8 poll window as the single-task case).
assert iter >= 5 and iter <= 8
print("Run 3 parallel sleep(1)'s in: ", time.time() - t)
| 16.833333
| 56
| 0.564356
|
import time
from multiprocessing import Pool
def f(x):
return x*x
pool = Pool(4)
future = pool.apply_async(f, (10,))
assert future.get() == 100
def f2(x):
time.sleep(0.5)
return x + 1
future = pool.apply_async(f2, (10,))
iter = 0
while not future.ready():
time.sleep(0.1)
iter += 1
assert future.get() == 11
assert iter >= 5 and iter <= 8
t = time.time()
futs = [
pool.apply_async(f2, (10,)),
pool.apply_async(f2, (11,)),
pool.apply_async(f2, (12,)),
]
iter = 0
while True:
c = 0
for f in futs:
if not f.ready():
c += 1
if not c:
break
time.sleep(0.1)
iter += 1
assert iter >= 5 and iter <= 8
print("Run 3 parallel sleep(1)'s in: ", time.time() - t)
| true
| true
|
1c481af4ae671d17e7ab5377a49f168e82fa6385
| 5,618
|
py
|
Python
|
charts.py
|
suryatmodulus/excalidraw-analytics
|
6cc9ec3800d1ef51e312740a981b656940fb0660
|
[
"MIT"
] | 9
|
2021-02-07T13:15:06.000Z
|
2021-11-07T22:09:59.000Z
|
charts.py
|
suryatmodulus/excalidraw-analytics
|
6cc9ec3800d1ef51e312740a981b656940fb0660
|
[
"MIT"
] | 16
|
2021-02-08T16:10:44.000Z
|
2022-03-27T01:16:21.000Z
|
charts.py
|
suryatmodulus/excalidraw-analytics
|
6cc9ec3800d1ef51e312740a981b656940fb0660
|
[
"MIT"
] | 3
|
2021-02-08T15:09:33.000Z
|
2021-08-06T17:34:49.000Z
|
from datetime import datetime
from datetime import timedelta
from opencolor import oc
import json
import os
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
VERSION_DIR = os.path.join(ROOT_DIR, "version")
TEMPLATE_FILE = os.path.join(ROOT_DIR, "template.html")
INDEX_FILE = os.path.join(ROOT_DIR, "index.html")
MAX_DAYS = 7
def chart_colors(index):
    """Return one open-color swatch per hue at the given shade *index*."""
    hues = (
        "grape", "red", "orange", "yellow", "lime", "green",
        "teal", "cyan", "blue", "indigo", "violet",
    )
    return [oc[hue][index] for hue in hues]
# Light (shade 1) backgrounds paired with dark (shade 9) text for version rows.
chart_colors_bg = chart_colors(1)
chart_colors_text = chart_colors(9)
# Neutral shade used for cells with no data and for separator rows/columns.
empty_color = oc["gray"][1]
# Green ramp for the usage heat-map, lightest (lowest usage) to darkest.
# NOTE(review): shade 4 of "lime" is skipped — presumably deliberate for
# visual contrast; confirm.
usage_colors = [
    oc["lime"][0],
    oc["lime"][1],
    oc["lime"][2],
    oc["lime"][3],
    oc["lime"][5],
    oc["lime"][6],
]
def parse_day(filename):
    """Return True when *filename* ('YYYY-MM-DD.json') falls inside the
    trailing MAX_DAYS-day window ending today."""
    day_stamp = filename.replace(".json", "")
    day = datetime.strptime(day_stamp, "%Y-%m-%d")
    cutoff = datetime.today() - timedelta(days=MAX_DAYS)
    return day > cutoff
def string2date(string):
    """Render a 'YYYY-MM-DD' date string as 'DD Mon' (e.g. '07 Feb')."""
    parsed = datetime.strptime(string, "%Y-%m-%d")
    return parsed.strftime("%d %b")
def string2day(string):
    """Render a 'YYYY-MM-DD' date string as an abbreviated weekday name."""
    parsed = datetime.strptime(string, "%Y-%m-%d")
    return parsed.strftime("%a")
def string2weekday(string):
    """Return the weekday of a 'YYYY-MM-DD' string as int, 0=Sunday..6=Saturday."""
    parsed = datetime.strptime(string, "%Y-%m-%d")
    return int(parsed.strftime("%w"))
def render_cell(value, max):
    """Render one usage value as an HTML <td> heat-map cell.

    Note: the parameter name ``max`` shadows the builtin; kept for
    call-compatibility with existing callers.
    """
    # Map the value's share of *max* onto the usage_colors ramp.
    shade = round((value / max) * (len(usage_colors) - 1))
    if not value:
        return "<td style='background-color: %s'>-</td>" % (empty_color)
    return "<td style='background-color: %s'>%2.1f%%</td>" % (
        usage_colors[shade],
        value * 100,
    )
def main():
    """Regenerate index.html from the per-day version-usage JSON files.

    Reads template.html, aggregates the last MAX_DAYS of version/*.json
    (one dict of version -> usage share per day), and substitutes three
    placeholders: { data } (chart rows), { version_head } (table header)
    and { version_body } (per-version heat-map rows).
    """
    with open(TEMPLATE_FILE, "r") as template:
        data = template.read()
    # Only the top level of VERSION_DIR is scanned (first os.walk step).
    _, _, filenames = next(os.walk(VERSION_DIR))
    days = {}
    versions = set()
    for filename in filenames:
        # Skip files older than the MAX_DAYS window.
        if not parse_day(filename):
            continue
        with open(os.path.join(VERSION_DIR, filename), "r") as day_json:
            day = json.load(day_json)
        days[filename.replace(".json", "")] = day
        for key in day.keys():
            versions.add(key)
    sorted_days = sorted(days.items())
    sorted_versions = sorted(versions)
    # Find the largest usage value across all days; used to scale heat cells.
    max_value = 0
    for day in sorted_days:
        for version in day[1]:
            max_value = max(max_value, day[1][version])
    # Chart rows: header row with one column per version, then one row per
    # day. Full version keys are 28 chars ('YYYY-MM-DDTHH:MM-<7-char-sha>');
    # anything else is lumped under "Older".
    chart_rows = [["Day"]]
    for version in sorted_versions:
        chart_rows[len(chart_rows) - 1].append(
            version[-7:] if len(version) == 28 else "Older"
        )
    for day in sorted_days:
        chart_rows.append([string2date(day[0])])
        for version in sorted_versions:
            if version in day[1]:
                chart_rows[len(chart_rows) - 1].append((day[1][version]))
            else:
                chart_rows[len(chart_rows) - 1].append(0)
    # report[version][day] = usage share (0 when the version saw no use).
    report = {}
    for version in sorted(sorted_versions, reverse=True):
        report[version] = {}
        for day in sorted_days:
            report[version][day[0]] = 0
            if version in day[1]:
                report[version][day[0]] = day[1][version]
    # Table header: weekend day columns get a light red background.
    version_head = "<tr><th>Version</th><th>Commit</th><th style='background-color: {}'></th>".format(
        empty_color
    )
    for day in sorted_days:
        version_head += "<th style='background-color: %s'>%s<br>%s</th>" % (
            oc["red"][0] if string2weekday(day[0]) in [6, 0] else oc["white"],
            string2day(day[0]),
            string2date(day[0]),
        )
    version_head += "</tr>"
    version_body = ""
    current_version_date = ""
    for index, row in enumerate(report):
        version_date = row[:10]
        version_datetime = row[:16].replace("T", " ")
        version_hash = row[-7:]
        # Cycle row colors; offsetting by len(sorted_versions) keeps a given
        # version's color stable as new versions are appended.
        color_bg = chart_colors_bg[
            (index - len(sorted_versions)) % len(chart_colors_bg)
        ]
        color_text = chart_colors_text[
            (index - len(sorted_versions)) % len(chart_colors_text)
        ]
        # Insert a gray separator row whenever the release date changes.
        if version_date != current_version_date:
            version_body += (
                "<tr><td style='background-color: {}' colspan='{}'></td></tr>".format(
                    empty_color, 3 + len(report[row])
                )
            )
        version_body += "<tr><td style='background-color: {}; color: {};'><code>{}</code></td>".format(
            color_bg,
            color_text,
            version_datetime,
        )
        # 20-char keys are the "older" bucket (no commit hash to link to).
        if len(row) == 20:
            version_body += "<td style='background-color: {};'></td><td style='background-color: {}'></td>".format(
                color_bg,
                empty_color,
            )
        else:
            version_body += "<td style='background-color: {};'><code><a style='color: {};' href='https://github.com/excalidraw/excalidraw/commit/{}'>{}</a></code></td><td style='background-color: {}'></td>".format(
                color_bg,
                color_text,
                version_hash,
                version_hash,
                empty_color,
            )
        for day in report[row]:
            version_body += render_cell(report[row][day], max_value)
        version_body += "</tr>\n"
        current_version_date = version_date
    data = data.replace("{ data }", "%r" % chart_rows)
    data = data.replace("{ version_head }", version_head)
    data = data.replace("{ version_body }", version_body)
    with open(INDEX_FILE, "w") as index:
        index.write(data)
    print("Charts updated")
if __name__ == "__main__":
    main()
| 29.413613
| 214
| 0.559808
|
from datetime import datetime
from datetime import timedelta
from opencolor import oc
import json
import os
ROOT_DIR = os.path.dirname(os.path.realpath(__file__))
VERSION_DIR = os.path.join(ROOT_DIR, "version")
TEMPLATE_FILE = os.path.join(ROOT_DIR, "template.html")
INDEX_FILE = os.path.join(ROOT_DIR, "index.html")
MAX_DAYS = 7
def chart_colors(index):
return [
oc["grape"][index],
oc["red"][index],
oc["orange"][index],
oc["yellow"][index],
oc["lime"][index],
oc["green"][index],
oc["teal"][index],
oc["cyan"][index],
oc["blue"][index],
oc["indigo"][index],
oc["violet"][index],
]
chart_colors_bg = chart_colors(1)
chart_colors_text = chart_colors(9)
empty_color = oc["gray"][1]
usage_colors = [
oc["lime"][0],
oc["lime"][1],
oc["lime"][2],
oc["lime"][3],
oc["lime"][5],
oc["lime"][6],
]
def parse_day(filename):
filename = filename.replace(".json", "")
file_date = datetime.strptime(filename, "%Y-%m-%d")
today = datetime.today()
return file_date > today + timedelta(days=-MAX_DAYS)
def string2date(string):
return datetime.strptime(string, "%Y-%m-%d").strftime("%d %b")
def string2day(string):
return datetime.strptime(string, "%Y-%m-%d").strftime("%a")
def string2weekday(string):
return int(datetime.strptime(string, "%Y-%m-%d").strftime("%w"))
def render_cell(value, max):
color_id = round((value / max) * (len(usage_colors) - 1))
if value:
return "<td style='background-color: %s'>%2.1f%%</td>" % (
usage_colors[color_id],
value * 100,
)
return "<td style='background-color: %s'>-</td>" % (empty_color)
def main():
with open(TEMPLATE_FILE, "r") as template:
data = template.read()
_, _, filenames = next(os.walk(VERSION_DIR))
days = {}
versions = set()
for filename in filenames:
if not parse_day(filename):
continue
with open(os.path.join(VERSION_DIR, filename), "r") as day_json:
day = json.load(day_json)
days[filename.replace(".json", "")] = day
for key in day.keys():
versions.add(key)
sorted_days = sorted(days.items())
sorted_versions = sorted(versions)
max_value = 0
for day in sorted_days:
for version in day[1]:
max_value = max(max_value, day[1][version])
chart_rows = [["Day"]]
for version in sorted_versions:
chart_rows[len(chart_rows) - 1].append(
version[-7:] if len(version) == 28 else "Older"
)
for day in sorted_days:
chart_rows.append([string2date(day[0])])
for version in sorted_versions:
if version in day[1]:
chart_rows[len(chart_rows) - 1].append((day[1][version]))
else:
chart_rows[len(chart_rows) - 1].append(0)
report = {}
for version in sorted(sorted_versions, reverse=True):
report[version] = {}
for day in sorted_days:
report[version][day[0]] = 0
if version in day[1]:
report[version][day[0]] = day[1][version]
version_head = "<tr><th>Version</th><th>Commit</th><th style='background-color: {}'></th>".format(
empty_color
)
for day in sorted_days:
version_head += "<th style='background-color: %s'>%s<br>%s</th>" % (
oc["red"][0] if string2weekday(day[0]) in [6, 0] else oc["white"],
string2day(day[0]),
string2date(day[0]),
)
version_head += "</tr>"
version_body = ""
current_version_date = ""
for index, row in enumerate(report):
version_date = row[:10]
version_datetime = row[:16].replace("T", " ")
version_hash = row[-7:]
color_bg = chart_colors_bg[
(index - len(sorted_versions)) % len(chart_colors_bg)
]
color_text = chart_colors_text[
(index - len(sorted_versions)) % len(chart_colors_text)
]
if version_date != current_version_date:
version_body += (
"<tr><td style='background-color: {}' colspan='{}'></td></tr>".format(
empty_color, 3 + len(report[row])
)
)
version_body += "<tr><td style='background-color: {}; color: {};'><code>{}</code></td>".format(
color_bg,
color_text,
version_datetime,
)
if len(row) == 20:
version_body += "<td style='background-color: {};'></td><td style='background-color: {}'></td>".format(
color_bg,
empty_color,
)
else:
version_body += "<td style='background-color: {};'><code><a style='color: {};' href='https://github.com/excalidraw/excalidraw/commit/{}'>{}</a></code></td><td style='background-color: {}'></td>".format(
color_bg,
color_text,
version_hash,
version_hash,
empty_color,
)
for day in report[row]:
version_body += render_cell(report[row][day], max_value)
version_body += "</tr>\n"
current_version_date = version_date
data = data.replace("{ data }", "%r" % chart_rows)
data = data.replace("{ version_head }", version_head)
data = data.replace("{ version_body }", version_body)
with open(INDEX_FILE, "w") as index:
index.write(data)
print("Charts updated")
if __name__ == "__main__":
main()
| true
| true
|
1c481be9d20dfe69d10a24305d2be9deb742efd6
| 152
|
py
|
Python
|
meiduo_mall/apps/contents/urls.py
|
yeluoguigen/meiduo_project
|
f7d416cf9ac433c27e58783f38687a1fbe3df6fe
|
[
"MIT"
] | null | null | null |
meiduo_mall/apps/contents/urls.py
|
yeluoguigen/meiduo_project
|
f7d416cf9ac433c27e58783f38687a1fbe3df6fe
|
[
"MIT"
] | null | null | null |
meiduo_mall/apps/contents/urls.py
|
yeluoguigen/meiduo_project
|
f7d416cf9ac433c27e58783f38687a1fbe3df6fe
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url
from apps.contents import views

urlpatterns = [
    # Site root -> contents index page (reversible via the name "index").
    url(r'^$', views.IndexView.as_view(), name="index"),
]
| 15.2
| 56
| 0.664474
|
from django.conf.urls import url
from apps.contents import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name="index"),
]
| true
| true
|
1c481cbfa9a98ca0259c24e6baae94b4e74f1854
| 211
|
py
|
Python
|
erpnext_oralcare/erpnext_oralcare/doctype/labial_mucosa/test_labial_mucosa.py
|
techlift-tech/erpnext-oralcare
|
e10a787118c64dcedb7f9d0aedb1fe6c5a00e19e
|
[
"MIT"
] | null | null | null |
erpnext_oralcare/erpnext_oralcare/doctype/labial_mucosa/test_labial_mucosa.py
|
techlift-tech/erpnext-oralcare
|
e10a787118c64dcedb7f9d0aedb1fe6c5a00e19e
|
[
"MIT"
] | 165
|
2019-04-25T12:08:17.000Z
|
2019-08-09T13:26:03.000Z
|
erpnext_oralcare/erpnext_oralcare/doctype/labial_mucosa/test_labial_mucosa.py
|
techlift-tech/erpnext-oralcare
|
e10a787118c64dcedb7f9d0aedb1fe6c5a00e19e
|
[
"MIT"
] | 2
|
2019-09-10T16:49:11.000Z
|
2021-12-03T22:54:21.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Techlift and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestLabialMucosa(unittest.TestCase):
    """Placeholder test case for the Labial Mucosa doctype; no tests yet."""

    pass
| 19.181818
| 47
| 0.772512
|
from __future__ import unicode_literals
import frappe
import unittest
class TestLabialMucosa(unittest.TestCase):
pass
| true
| true
|
1c481ccf48bce4f8db10a6511af9ca1646796d16
| 2,134
|
py
|
Python
|
infinisdk/infinibox/fc_soft_target.py
|
kobutton/infinisdk
|
4ae5c66a2d7926636a52cb0a1452f9a2809b2ec2
|
[
"BSD-3-Clause"
] | 5
|
2019-02-26T20:11:43.000Z
|
2021-03-10T08:45:38.000Z
|
infinisdk/infinibox/fc_soft_target.py
|
kobutton/infinisdk
|
4ae5c66a2d7926636a52cb0a1452f9a2809b2ec2
|
[
"BSD-3-Clause"
] | 11
|
2017-11-15T19:20:23.000Z
|
2021-09-14T18:17:47.000Z
|
infinisdk/infinibox/fc_soft_target.py
|
kobutton/infinisdk
|
4ae5c66a2d7926636a52cb0a1452f9a2809b2ec2
|
[
"BSD-3-Clause"
] | 2
|
2017-11-16T11:59:05.000Z
|
2019-02-25T20:44:23.000Z
|
from urlobject import URLObject as URL
from ..core import Field
from ..core.bindings import RelatedObjectBinding, RelatedComponentBinding
from ..core.system_object import SystemObject
from ..core.system_object_utils import get_data_for_object_creation
from ..core.translators_and_types import WWNType
from ..core.type_binder import TypeBinder
class FcSoftTargetsBinder(TypeBinder):
    """Type binder adding bulk/system-wide operations for FC soft targets."""

    def create_many(self, **fields):
        """Create multiple soft targets with a single API command.

        :param fields: all :class:`.FcSoftTarget` creation parameters & quantity
        :returns: a list of :class:`.FcSoftTarget` objects
        """
        creation_data = get_data_for_object_creation(
            self.object_type, self.system, fields)
        response = self.system.api.post(
            self.get_url_path().add_path('create_multiple'),
            data=creation_data)
        return [
            self.object_type.construct(self.system, obj_info)
            for obj_info in response.get_result()
        ]

    def redistribute(self):
        """POST the collection-level 'redistribute' action."""
        return self.system.api.post(
            self.get_url_path().add_path('redistribute'), data={})

    def wipe(self):
        """POST the collection-level 'wipe' action."""
        return self.system.api.post(
            self.get_url_path().add_path('wipe'), data={})
class FcSoftTarget(SystemObject):
    """API object representing a Fibre Channel soft target."""

    # Collection binder providing create_many/redistribute/wipe.
    BINDER_CLASS = FcSoftTargetsBinder
    URL_PATH = URL('fc/soft_targets')
    FIELDS = [
        Field("id", type=int, is_identity=True, cached=True),
        Field("wwpn", cached=True, type=WWNType),
        Field("port_number", type=int),
        Field("switch", api_name="switch_id", type="infinisdk.infinibox.fc_switch:FcSwitch",
              creation_parameter=True, binding=RelatedObjectBinding('fc_switches')),
        Field("node", api_name="node_id",
              binding=RelatedComponentBinding(api_index_name='node_id', value_for_none=None)),
        # add_getter=False: the getter is written by hand below.
        Field("is_home", type=bool, add_getter=False),
    ]

    @classmethod
    def is_supported(cls, system):
        # Soft targets are only available on systems with NPIV support.
        return system.compat.has_npiv()

    @classmethod
    def get_type_name(cls):
        """Return the API type name for this object."""
        return "fc_soft_target"

    def is_home(self, **kwargs):
        # Manual getter for the 'is_home' field (see add_getter=False above).
        return self.get_field('is_home', **kwargs)
| 35.566667
| 94
| 0.685098
|
from urlobject import URLObject as URL
from ..core import Field
from ..core.bindings import RelatedObjectBinding, RelatedComponentBinding
from ..core.system_object import SystemObject
from ..core.system_object_utils import get_data_for_object_creation
from ..core.translators_and_types import WWNType
from ..core.type_binder import TypeBinder
class FcSoftTargetsBinder(TypeBinder):
def create_many(self, **fields):
url = self.get_url_path().add_path('create_multiple')
data = get_data_for_object_creation(self.object_type, self.system, fields)
res = self.system.api.post(url, data=data)
return [self.object_type.construct(self.system, obj_info)
for obj_info in res.get_result()]
def redistribute(self):
url = self.get_url_path().add_path('redistribute')
return self.system.api.post(url, data={})
def wipe(self):
url = self.get_url_path().add_path('wipe')
return self.system.api.post(url, data={})
class FcSoftTarget(SystemObject):
BINDER_CLASS = FcSoftTargetsBinder
URL_PATH = URL('fc/soft_targets')
FIELDS = [
Field("id", type=int, is_identity=True, cached=True),
Field("wwpn", cached=True, type=WWNType),
Field("port_number", type=int),
Field("switch", api_name="switch_id", type="infinisdk.infinibox.fc_switch:FcSwitch",
creation_parameter=True, binding=RelatedObjectBinding('fc_switches')),
Field("node", api_name="node_id",
binding=RelatedComponentBinding(api_index_name='node_id', value_for_none=None)),
Field("is_home", type=bool, add_getter=False),
]
@classmethod
def is_supported(cls, system):
return system.compat.has_npiv()
@classmethod
def get_type_name(cls):
return "fc_soft_target"
def is_home(self, **kwargs):
return self.get_field('is_home', **kwargs)
| true
| true
|
1c481cf6fa6269a851564b750d3143531073ef45
| 13,845
|
py
|
Python
|
mne/channels/tests/test_layout.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | null | null | null |
mne/channels/tests/test_layout.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | 23
|
2017-09-12T11:08:26.000Z
|
2019-10-04T11:11:29.000Z
|
mne/channels/tests/test_layout.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | 3
|
2019-01-28T13:48:00.000Z
|
2019-07-10T16:02:11.000Z
|
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Denis Engemann <denis.engemann@gmail.com>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Eric Larson <larson.eric.d@gmail.com>
#
# License: Simplified BSD
import copy
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose, assert_equal)
import pytest
import matplotlib.pyplot as plt
from mne.channels import (make_eeg_layout, make_grid_layout, read_layout,
find_layout)
from mne.channels.layout import (_box_size, _auto_topomap_coords,
generate_2d_layout)
from mne.utils import run_tests_if_main
from mne import pick_types, pick_info
from mne.io import read_raw_kit, _empty_info, read_info
from mne.io.constants import FIFF
from mne.bem import fit_sphere_to_headshape
from mne.utils import _TempDir
io_dir = op.join(op.dirname(__file__), '..', '..', 'io')
fif_fname = op.join(io_dir, 'tests', 'data', 'test_raw.fif')
lout_path = op.join(io_dir, 'tests', 'data')
bti_dir = op.join(io_dir, 'bti', 'tests', 'data')
fname_ctf_raw = op.join(io_dir, 'tests', 'data', 'test_ctf_comp_raw.fif')
fname_kit_157 = op.join(io_dir, 'kit', 'tests', 'data', 'test.sqd')
fname_kit_umd = op.join(io_dir, 'kit', 'tests', 'data', 'test_umd-raw.sqd')
def _get_test_info():
    """Make a minimal measurement-info object: two ICA channels + one EOG."""
    test_info = _empty_info(1000)
    # Identity-like 3x4 location matrix, shared (copied) by all channels.
    loc = np.array([0., 0., 0., 1., 0., 0., 0., 1., 0., 0., 0., 1.],
                   dtype=np.float32)
    # NOTE(review): the first two dicts use the key 'coord_Frame' (capital F)
    # while the third uses 'coord_frame' — looks like a typo; confirm whether
    # anything downstream reads this key.
    test_info['chs'] = [
        {'cal': 1, 'ch_name': 'ICA 001', 'coil_type': 0, 'coord_Frame': 0,
         'kind': 502, 'loc': loc.copy(), 'logno': 1, 'range': 1.0, 'scanno': 1,
         'unit': -1, 'unit_mul': 0},
        {'cal': 1, 'ch_name': 'ICA 002', 'coil_type': 0, 'coord_Frame': 0,
         'kind': 502, 'loc': loc.copy(), 'logno': 2, 'range': 1.0, 'scanno': 2,
         'unit': -1, 'unit_mul': 0},
        {'cal': 0.002142000012099743, 'ch_name': 'EOG 061', 'coil_type': 1,
         'coord_frame': 0, 'kind': 202, 'loc': loc.copy(), 'logno': 61,
         'range': 1.0, 'scanno': 376, 'unit': 107, 'unit_mul': 0}]
    test_info._update_redundant()
    test_info._check_consistency()
    return test_info
def test_io_layout_lout():
    """Round-trip a Vectorview layout through a .lout file."""
    tempdir = _TempDir()
    layout = read_layout('Vectorview-all', scale=False)
    fname = op.join(tempdir, 'foobar.lout')
    layout.save(fname)
    layout_read = read_layout(fname, path='./', scale=False)
    # Positions survive the write/read cycle up to file precision.
    assert_array_almost_equal(layout.pos, layout_read.pos, decimal=2)
    assert layout.names == layout_read.names
    print(layout)  # smoke-test Layout.__repr__
def test_io_layout_lay():
    """Round-trip a CTF layout through a .lay file."""
    tempdir = _TempDir()
    layout = read_layout('CTF151', scale=False)
    fname = op.join(tempdir, 'foobar.lay')
    layout.save(fname)
    layout_read = read_layout(fname, path='./', scale=False)
    # Positions survive the write/read cycle up to file precision.
    assert_array_almost_equal(layout.pos, layout_read.pos, decimal=2)
    assert layout.names == layout_read.names
def test_auto_topomap_coords():
    """Test mapping of coordinates in 3D space to 2D.

    Checks that channel-location-based and digitization-based projections
    agree, then exercises every error path of _auto_topomap_coords.
    """
    info = read_info(fif_fname)
    picks = pick_types(info, meg=False, eeg=True, eog=False, stim=False)

    # Remove extra digitization point, so EEG digitization points match up
    # with the EEG channels
    del info['dig'][85]

    # Remove head origin from channel locations, so mapping with digitization
    # points yields the same result
    dig_kinds = (FIFF.FIFFV_POINT_CARDINAL,
                 FIFF.FIFFV_POINT_EEG,
                 FIFF.FIFFV_POINT_EXTRA)
    _, origin_head, _ = fit_sphere_to_headshape(info, dig_kinds, units='m')
    for ch in info['chs']:
        ch['loc'][:3] -= origin_head

    # Use channel locations
    l0 = _auto_topomap_coords(info, picks)

    # Remove electrode position information, use digitization points from now
    # on.
    for ch in info['chs']:
        ch['loc'].fill(np.nan)

    l1 = _auto_topomap_coords(info, picks)
    assert_allclose(l1, l0, atol=1e-3)

    # Test plotting mag topomap without channel locations: it should fail
    mag_picks = pick_types(info, meg='mag')
    pytest.raises(ValueError, _auto_topomap_coords, info, mag_picks)

    # Test function with too many EEG digitization points: it should fail
    info['dig'].append({'r': [1, 2, 3], 'kind': FIFF.FIFFV_POINT_EEG})
    pytest.raises(ValueError, _auto_topomap_coords, info, picks)

    # Test function with too few EEG digitization points: it should fail
    info['dig'] = info['dig'][:-2]
    pytest.raises(ValueError, _auto_topomap_coords, info, picks)

    # Electrode positions must be unique
    info['dig'].append(info['dig'][-1])
    pytest.raises(ValueError, _auto_topomap_coords, info, picks)

    # Test function without EEG digitization points: it should fail
    info['dig'] = [d for d in info['dig'] if d['kind'] != FIFF.FIFFV_POINT_EEG]
    pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)

    # Test function without any digitization points, it should fail
    info['dig'] = None
    pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)
    info['dig'] = []
    pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)
def test_make_eeg_layout():
    """Test creation of EEG layout."""
    tempdir = _TempDir()
    tmp_name = 'foo'
    lout_name = 'test_raw'
    lout_orig = read_layout(kind=lout_name, path=lout_path)
    info = read_info(fif_fname)
    # Mark one channel bad; exclude=[] below — presumably so the bad channel
    # is still included in the layout. TODO confirm make_eeg_layout's default
    # exclude behavior.
    info['bads'].append(info['ch_names'][360])
    layout = make_eeg_layout(info, exclude=[])
    # One layout entry per EEG channel (names starting with 'EE').
    assert_array_equal(len(layout.names), len([ch for ch in info['ch_names']
                                               if ch.startswith('EE')]))
    layout.save(op.join(tempdir, tmp_name + '.lout'))
    lout_new = read_layout(kind=tmp_name, path=tempdir, scale=False)
    assert_array_equal(lout_new.kind, tmp_name)
    assert_allclose(layout.pos, lout_new.pos, atol=0.1)
    assert_array_equal(lout_orig.names, lout_new.names)

    # Test input validation: radius/width/height outside the accepted range
    # must raise ValueError.
    pytest.raises(ValueError, make_eeg_layout, info, radius=-0.1)
    pytest.raises(ValueError, make_eeg_layout, info, radius=0.6)
    pytest.raises(ValueError, make_eeg_layout, info, width=-0.1)
    pytest.raises(ValueError, make_eeg_layout, info, width=1.1)
    pytest.raises(ValueError, make_eeg_layout, info, height=-0.1)
    pytest.raises(ValueError, make_eeg_layout, info, height=1.1)
def test_make_grid_layout():
    """Test creation of grid layout."""
    tempdir = _TempDir()
    tmp_name = 'bar'
    lout_name = 'test_ica'
    lout_orig = read_layout(kind=lout_name, path=lout_path)
    layout = make_grid_layout(_get_test_info())
    layout.save(op.join(tempdir, tmp_name + '.lout'))
    lout_new = read_layout(kind=tmp_name, path=tempdir)
    # The freshly generated grid must match the stored reference layout.
    assert_array_equal(lout_new.kind, tmp_name)
    assert_array_equal(lout_orig.pos, lout_new.pos)
    assert_array_equal(lout_orig.names, lout_new.names)

    # Test creating grid layout with specified number of columns
    layout = make_grid_layout(_get_test_info(), n_col=2)
    # Vertical positions should be equal
    assert layout.pos[0, 1] == layout.pos[1, 1]
    # Horizontal positions should be unequal
    assert layout.pos[0, 0] != layout.pos[1, 0]
    # Box sizes should be equal
    assert_array_equal(layout.pos[0, 3:], layout.pos[1, 3:])
def test_find_layout():
    """Test finding layout.

    Exercises layout auto-detection across Vectorview (all/grad/mag), EEG,
    CTF, BTi and KIT systems, plus the explicit ch_type override.
    """
    # Unknown ch_type must be rejected.
    pytest.raises(ValueError, find_layout, _get_test_info(), ch_type='meep')

    sample_info = read_info(fif_fname)
    # Channel-type subsets of the sample recording.
    grads = pick_types(sample_info, meg='grad')
    sample_info2 = pick_info(sample_info, grads)
    mags = pick_types(sample_info, meg='mag')
    sample_info3 = pick_info(sample_info, mags)
    # mock new convention: channel names without spaces ('MEG0111')
    sample_info4 = copy.deepcopy(sample_info)
    for ii, name in enumerate(sample_info4['ch_names']):
        new = name.replace(' ', '')
        sample_info4['chs'][ii]['ch_name'] = new
    eegs = pick_types(sample_info, meg=False, eeg=True)
    sample_info5 = pick_info(sample_info, eegs)

    lout = find_layout(sample_info, ch_type=None)
    assert lout.kind == 'Vectorview-all'
    assert all(' ' in k for k in lout.names)

    lout = find_layout(sample_info2, ch_type='meg')
    assert_equal(lout.kind, 'Vectorview-all')

    # test new vector-view (space-less names must still be detected)
    lout = find_layout(sample_info4, ch_type=None)
    assert_equal(lout.kind, 'Vectorview-all')
    assert all(' ' not in k for k in lout.names)

    lout = find_layout(sample_info, ch_type='grad')
    assert_equal(lout.kind, 'Vectorview-grad')
    lout = find_layout(sample_info2)
    assert_equal(lout.kind, 'Vectorview-grad')
    lout = find_layout(sample_info2, ch_type='grad')
    assert_equal(lout.kind, 'Vectorview-grad')
    lout = find_layout(sample_info2, ch_type='meg')
    assert_equal(lout.kind, 'Vectorview-all')

    lout = find_layout(sample_info, ch_type='mag')
    assert_equal(lout.kind, 'Vectorview-mag')
    lout = find_layout(sample_info3)
    assert_equal(lout.kind, 'Vectorview-mag')
    lout = find_layout(sample_info3, ch_type='mag')
    assert_equal(lout.kind, 'Vectorview-mag')
    lout = find_layout(sample_info3, ch_type='meg')
    assert_equal(lout.kind, 'Vectorview-all')

    lout = find_layout(sample_info, ch_type='eeg')
    assert_equal(lout.kind, 'EEG')
    lout = find_layout(sample_info5)
    assert_equal(lout.kind, 'EEG')
    lout = find_layout(sample_info5, ch_type='eeg')
    assert_equal(lout.kind, 'EEG')
    # no common layout, 'meg' option not supported

    lout = find_layout(read_info(fname_ctf_raw))
    assert_equal(lout.kind, 'CTF-275')

    fname_bti_raw = op.join(bti_dir, 'exported4D_linux_raw.fif')
    lout = find_layout(read_info(fname_bti_raw))
    assert_equal(lout.kind, 'magnesWH3600')

    raw_kit = read_raw_kit(fname_kit_157)
    lout = find_layout(raw_kit.info)
    assert_equal(lout.kind, 'KIT-157')

    # Marking channels bad must not change the detected layout.
    raw_kit.info['bads'] = ['MEG 13', 'MEG 14', 'MEG 15', 'MEG 16']
    lout = find_layout(raw_kit.info)
    assert_equal(lout.kind, 'KIT-157')

    raw_umd = read_raw_kit(fname_kit_umd)
    lout = find_layout(raw_umd.info)
    assert_equal(lout.kind, 'KIT-UMD-3')

    # Test plotting
    lout.plot()
    lout.plot(picks=np.arange(10))
    plt.close('all')
def test_box_size():
"""Test calculation of box sizes."""
# No points. Box size should be 1,1.
assert_allclose(_box_size([]), (1.0, 1.0))
# Create one point. Box size should be 1,1.
point = [(0, 0)]
assert_allclose(_box_size(point), (1.0, 1.0))
# Create two points. Box size should be 0.5,1.
points = [(0.25, 0.5), (0.75, 0.5)]
assert_allclose(_box_size(points), (0.5, 1.0))
# Create three points. Box size should be (0.5, 0.5).
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points), (0.5, 0.5))
# Create a grid of points. Box size should be (0.1, 0.1).
x, y = np.meshgrid(np.linspace(-0.5, 0.5, 11), np.linspace(-0.5, 0.5, 11))
x, y = x.ravel(), y.ravel()
assert_allclose(_box_size(np.c_[x, y]), (0.1, 0.1))
# Create a random set of points. This should never break the function.
rng = np.random.RandomState(42)
points = rng.rand(100, 2)
width, height = _box_size(points)
assert width is not None
assert height is not None
# Test specifying an existing width.
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, width=0.4), (0.4, 0.5))
# Test specifying an existing width that has influence on the calculated
# height.
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, width=0.2), (0.2, 1.0))
# Test specifying an existing height.
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, height=0.4), (0.5, 0.4))
# Test specifying an existing height that has influence on the calculated
# width.
points = [(0.25, 0.25), (0.75, 0.45), (0.5, 0.75)]
assert_allclose(_box_size(points, height=0.1), (1.0, 0.1))
# Test specifying both width and height. The function should simply return
# these.
points = [(0.25, 0.25), (0.75, 0.45), (0.5, 0.75)]
assert_array_equal(_box_size(points, width=0.1, height=0.1), (0.1, 0.1))
# Test specifying a width that will cause unfixable horizontal overlap and
# essentially breaks the function (height will be 0).
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_array_equal(_box_size(points, width=1), (1, 0))
# Test adding some padding.
# Create three points. Box size should be a little less than (0.5, 0.5).
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, padding=0.1), (0.9 * 0.5, 0.9 * 0.5))
def test_generate_2d_layout():
"""Test creation of a layout from 2d points."""
snobg = 10
sbg = 15
side = range(snobg)
bg_image = np.random.RandomState(42).randn(sbg, sbg)
w, h = [.2, .5]
# Generate fake data
xy = np.array([(i, j) for i in side for j in side])
lt = generate_2d_layout(xy, w=w, h=h)
# Correct points ordering / minmaxing
comp_1, comp_2 = [(5, 0), (7, 0)]
assert lt.pos[:, :2].max() == 1
assert lt.pos[:, :2].min() == 0
with np.errstate(invalid='ignore'): # divide by zero
assert_allclose(xy[comp_2] / float(xy[comp_1]),
lt.pos[comp_2] / float(lt.pos[comp_1]))
assert_allclose(lt.pos[0, [2, 3]], [w, h])
# Correct number elements
assert lt.pos.shape[1] == 4
assert len(lt.box) == 4
# Make sure background image normalizing is correct
lt_bg = generate_2d_layout(xy, bg_image=bg_image)
assert_allclose(lt_bg.pos[:, :2].max(), xy.max() / float(sbg))
run_tests_if_main()
| 38.245856
| 79
| 0.656555
|
import copy
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose, assert_equal)
import pytest
import matplotlib.pyplot as plt
from mne.channels import (make_eeg_layout, make_grid_layout, read_layout,
find_layout)
from mne.channels.layout import (_box_size, _auto_topomap_coords,
generate_2d_layout)
from mne.utils import run_tests_if_main
from mne import pick_types, pick_info
from mne.io import read_raw_kit, _empty_info, read_info
from mne.io.constants import FIFF
from mne.bem import fit_sphere_to_headshape
from mne.utils import _TempDir
io_dir = op.join(op.dirname(__file__), '..', '..', 'io')
fif_fname = op.join(io_dir, 'tests', 'data', 'test_raw.fif')
lout_path = op.join(io_dir, 'tests', 'data')
bti_dir = op.join(io_dir, 'bti', 'tests', 'data')
fname_ctf_raw = op.join(io_dir, 'tests', 'data', 'test_ctf_comp_raw.fif')
fname_kit_157 = op.join(io_dir, 'kit', 'tests', 'data', 'test.sqd')
fname_kit_umd = op.join(io_dir, 'kit', 'tests', 'data', 'test_umd-raw.sqd')
def _get_test_info():
test_info = _empty_info(1000)
loc = np.array([0., 0., 0., 1., 0., 0., 0., 1., 0., 0., 0., 1.],
dtype=np.float32)
test_info['chs'] = [
{'cal': 1, 'ch_name': 'ICA 001', 'coil_type': 0, 'coord_Frame': 0,
'kind': 502, 'loc': loc.copy(), 'logno': 1, 'range': 1.0, 'scanno': 1,
'unit': -1, 'unit_mul': 0},
{'cal': 1, 'ch_name': 'ICA 002', 'coil_type': 0, 'coord_Frame': 0,
'kind': 502, 'loc': loc.copy(), 'logno': 2, 'range': 1.0, 'scanno': 2,
'unit': -1, 'unit_mul': 0},
{'cal': 0.002142000012099743, 'ch_name': 'EOG 061', 'coil_type': 1,
'coord_frame': 0, 'kind': 202, 'loc': loc.copy(), 'logno': 61,
'range': 1.0, 'scanno': 376, 'unit': 107, 'unit_mul': 0}]
test_info._update_redundant()
test_info._check_consistency()
return test_info
def test_io_layout_lout():
tempdir = _TempDir()
layout = read_layout('Vectorview-all', scale=False)
layout.save(op.join(tempdir, 'foobar.lout'))
layout_read = read_layout(op.join(tempdir, 'foobar.lout'), path='./',
scale=False)
assert_array_almost_equal(layout.pos, layout_read.pos, decimal=2)
assert layout.names == layout_read.names
print(layout)
def test_io_layout_lay():
tempdir = _TempDir()
layout = read_layout('CTF151', scale=False)
layout.save(op.join(tempdir, 'foobar.lay'))
layout_read = read_layout(op.join(tempdir, 'foobar.lay'), path='./',
scale=False)
assert_array_almost_equal(layout.pos, layout_read.pos, decimal=2)
assert layout.names == layout_read.names
def test_auto_topomap_coords():
info = read_info(fif_fname)
picks = pick_types(info, meg=False, eeg=True, eog=False, stim=False)
del info['dig'][85]
dig_kinds = (FIFF.FIFFV_POINT_CARDINAL,
FIFF.FIFFV_POINT_EEG,
FIFF.FIFFV_POINT_EXTRA)
_, origin_head, _ = fit_sphere_to_headshape(info, dig_kinds, units='m')
for ch in info['chs']:
ch['loc'][:3] -= origin_head
l0 = _auto_topomap_coords(info, picks)
for ch in info['chs']:
ch['loc'].fill(np.nan)
l1 = _auto_topomap_coords(info, picks)
assert_allclose(l1, l0, atol=1e-3)
mag_picks = pick_types(info, meg='mag')
pytest.raises(ValueError, _auto_topomap_coords, info, mag_picks)
info['dig'].append({'r': [1, 2, 3], 'kind': FIFF.FIFFV_POINT_EEG})
pytest.raises(ValueError, _auto_topomap_coords, info, picks)
info['dig'] = info['dig'][:-2]
pytest.raises(ValueError, _auto_topomap_coords, info, picks)
info['dig'].append(info['dig'][-1])
pytest.raises(ValueError, _auto_topomap_coords, info, picks)
info['dig'] = [d for d in info['dig'] if d['kind'] != FIFF.FIFFV_POINT_EEG]
pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)
info['dig'] = None
pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)
info['dig'] = []
pytest.raises(RuntimeError, _auto_topomap_coords, info, picks)
def test_make_eeg_layout():
tempdir = _TempDir()
tmp_name = 'foo'
lout_name = 'test_raw'
lout_orig = read_layout(kind=lout_name, path=lout_path)
info = read_info(fif_fname)
info['bads'].append(info['ch_names'][360])
layout = make_eeg_layout(info, exclude=[])
assert_array_equal(len(layout.names), len([ch for ch in info['ch_names']
if ch.startswith('EE')]))
layout.save(op.join(tempdir, tmp_name + '.lout'))
lout_new = read_layout(kind=tmp_name, path=tempdir, scale=False)
assert_array_equal(lout_new.kind, tmp_name)
assert_allclose(layout.pos, lout_new.pos, atol=0.1)
assert_array_equal(lout_orig.names, lout_new.names)
pytest.raises(ValueError, make_eeg_layout, info, radius=-0.1)
pytest.raises(ValueError, make_eeg_layout, info, radius=0.6)
pytest.raises(ValueError, make_eeg_layout, info, width=-0.1)
pytest.raises(ValueError, make_eeg_layout, info, width=1.1)
pytest.raises(ValueError, make_eeg_layout, info, height=-0.1)
pytest.raises(ValueError, make_eeg_layout, info, height=1.1)
def test_make_grid_layout():
tempdir = _TempDir()
tmp_name = 'bar'
lout_name = 'test_ica'
lout_orig = read_layout(kind=lout_name, path=lout_path)
layout = make_grid_layout(_get_test_info())
layout.save(op.join(tempdir, tmp_name + '.lout'))
lout_new = read_layout(kind=tmp_name, path=tempdir)
assert_array_equal(lout_new.kind, tmp_name)
assert_array_equal(lout_orig.pos, lout_new.pos)
assert_array_equal(lout_orig.names, lout_new.names)
layout = make_grid_layout(_get_test_info(), n_col=2)
assert layout.pos[0, 1] == layout.pos[1, 1]
assert layout.pos[0, 0] != layout.pos[1, 0]
assert_array_equal(layout.pos[0, 3:], layout.pos[1, 3:])
def test_find_layout():
pytest.raises(ValueError, find_layout, _get_test_info(), ch_type='meep')
sample_info = read_info(fif_fname)
grads = pick_types(sample_info, meg='grad')
sample_info2 = pick_info(sample_info, grads)
mags = pick_types(sample_info, meg='mag')
sample_info3 = pick_info(sample_info, mags)
sample_info4 = copy.deepcopy(sample_info)
for ii, name in enumerate(sample_info4['ch_names']):
new = name.replace(' ', '')
sample_info4['chs'][ii]['ch_name'] = new
eegs = pick_types(sample_info, meg=False, eeg=True)
sample_info5 = pick_info(sample_info, eegs)
lout = find_layout(sample_info, ch_type=None)
assert lout.kind == 'Vectorview-all'
assert all(' ' in k for k in lout.names)
lout = find_layout(sample_info2, ch_type='meg')
assert_equal(lout.kind, 'Vectorview-all')
lout = find_layout(sample_info4, ch_type=None)
assert_equal(lout.kind, 'Vectorview-all')
assert all(' ' not in k for k in lout.names)
lout = find_layout(sample_info, ch_type='grad')
assert_equal(lout.kind, 'Vectorview-grad')
lout = find_layout(sample_info2)
assert_equal(lout.kind, 'Vectorview-grad')
lout = find_layout(sample_info2, ch_type='grad')
assert_equal(lout.kind, 'Vectorview-grad')
lout = find_layout(sample_info2, ch_type='meg')
assert_equal(lout.kind, 'Vectorview-all')
lout = find_layout(sample_info, ch_type='mag')
assert_equal(lout.kind, 'Vectorview-mag')
lout = find_layout(sample_info3)
assert_equal(lout.kind, 'Vectorview-mag')
lout = find_layout(sample_info3, ch_type='mag')
assert_equal(lout.kind, 'Vectorview-mag')
lout = find_layout(sample_info3, ch_type='meg')
assert_equal(lout.kind, 'Vectorview-all')
lout = find_layout(sample_info, ch_type='eeg')
assert_equal(lout.kind, 'EEG')
lout = find_layout(sample_info5)
assert_equal(lout.kind, 'EEG')
lout = find_layout(sample_info5, ch_type='eeg')
assert_equal(lout.kind, 'EEG')
lout = find_layout(read_info(fname_ctf_raw))
assert_equal(lout.kind, 'CTF-275')
fname_bti_raw = op.join(bti_dir, 'exported4D_linux_raw.fif')
lout = find_layout(read_info(fname_bti_raw))
assert_equal(lout.kind, 'magnesWH3600')
raw_kit = read_raw_kit(fname_kit_157)
lout = find_layout(raw_kit.info)
assert_equal(lout.kind, 'KIT-157')
raw_kit.info['bads'] = ['MEG 13', 'MEG 14', 'MEG 15', 'MEG 16']
lout = find_layout(raw_kit.info)
assert_equal(lout.kind, 'KIT-157')
raw_umd = read_raw_kit(fname_kit_umd)
lout = find_layout(raw_umd.info)
assert_equal(lout.kind, 'KIT-UMD-3')
lout.plot()
lout.plot(picks=np.arange(10))
plt.close('all')
def test_box_size():
assert_allclose(_box_size([]), (1.0, 1.0))
point = [(0, 0)]
assert_allclose(_box_size(point), (1.0, 1.0))
points = [(0.25, 0.5), (0.75, 0.5)]
assert_allclose(_box_size(points), (0.5, 1.0))
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points), (0.5, 0.5))
x, y = np.meshgrid(np.linspace(-0.5, 0.5, 11), np.linspace(-0.5, 0.5, 11))
x, y = x.ravel(), y.ravel()
assert_allclose(_box_size(np.c_[x, y]), (0.1, 0.1))
rng = np.random.RandomState(42)
points = rng.rand(100, 2)
width, height = _box_size(points)
assert width is not None
assert height is not None
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, width=0.4), (0.4, 0.5))
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, width=0.2), (0.2, 1.0))
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, height=0.4), (0.5, 0.4))
points = [(0.25, 0.25), (0.75, 0.45), (0.5, 0.75)]
assert_allclose(_box_size(points, height=0.1), (1.0, 0.1))
points = [(0.25, 0.25), (0.75, 0.45), (0.5, 0.75)]
assert_array_equal(_box_size(points, width=0.1, height=0.1), (0.1, 0.1))
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_array_equal(_box_size(points, width=1), (1, 0))
points = [(0.25, 0.25), (0.75, 0.25), (0.5, 0.75)]
assert_allclose(_box_size(points, padding=0.1), (0.9 * 0.5, 0.9 * 0.5))
def test_generate_2d_layout():
snobg = 10
sbg = 15
side = range(snobg)
bg_image = np.random.RandomState(42).randn(sbg, sbg)
w, h = [.2, .5]
xy = np.array([(i, j) for i in side for j in side])
lt = generate_2d_layout(xy, w=w, h=h)
comp_1, comp_2 = [(5, 0), (7, 0)]
assert lt.pos[:, :2].max() == 1
assert lt.pos[:, :2].min() == 0
with np.errstate(invalid='ignore'):
assert_allclose(xy[comp_2] / float(xy[comp_1]),
lt.pos[comp_2] / float(lt.pos[comp_1]))
assert_allclose(lt.pos[0, [2, 3]], [w, h])
assert lt.pos.shape[1] == 4
assert len(lt.box) == 4
lt_bg = generate_2d_layout(xy, bg_image=bg_image)
assert_allclose(lt_bg.pos[:, :2].max(), xy.max() / float(sbg))
run_tests_if_main()
| true
| true
|
1c481d4fefb3cdfdf7173e935ffef9744efd41a5
| 1,625
|
py
|
Python
|
eppy/tests/EPlusInterfaceFunctions_tests/integration.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | 116
|
2015-04-07T13:58:34.000Z
|
2022-02-23T15:52:35.000Z
|
eppy/tests/EPlusInterfaceFunctions_tests/integration.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | 324
|
2015-01-01T04:10:46.000Z
|
2022-03-16T16:28:13.000Z
|
eppy/tests/EPlusInterfaceFunctions_tests/integration.py
|
hnagda/eppy
|
422399ada78eb9f39ae61f96b385fe41a0a19100
|
[
"MIT"
] | 65
|
2015-07-14T21:57:02.000Z
|
2022-02-14T08:39:02.000Z
|
# Copyright (c) 2016 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
"""integration tests for EPlusInterfaceFunctions"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import eppy.EPlusInterfaceFunctions.iddgroups as iddgroups
def test_idd2group():
"""py.test for idd2group"""
data = (
(
"./eppy/tests/EPlusInterfaceFunctions_tests/integration/iddgroups.idd",
{
"G2": ["VersionG", "VersionG1", "VersionG2"],
"G1": ["Version", "Version1", "Version2"],
None: ["Lead Input", "Simulation Data"],
},
), # gdict
)
for fname, gdict in data:
result = iddgroups.idd2group(fname)
assert result == gdict
def test_idd2grouplist():
"""py.test idd2grouplist"""
data = (
(
"./eppy/tests/EPlusInterfaceFunctions_tests/integration/iddgroups.idd",
[
(None, "Lead Input"),
(None, "Simulation Data"),
("G1", "Version"),
("G1", "Version1"),
("G1", "Version2"),
("G2", "VersionG"),
("G2", "VersionG1"),
("G2", "VersionG2"),
],
), # glist
)
for fname, glist in data:
result = iddgroups.idd2grouplist(fname)
assert result == glist
| 30.092593
| 83
| 0.533538
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import eppy.EPlusInterfaceFunctions.iddgroups as iddgroups
def test_idd2group():
data = (
(
"./eppy/tests/EPlusInterfaceFunctions_tests/integration/iddgroups.idd",
{
"G2": ["VersionG", "VersionG1", "VersionG2"],
"G1": ["Version", "Version1", "Version2"],
None: ["Lead Input", "Simulation Data"],
},
),
)
for fname, gdict in data:
result = iddgroups.idd2group(fname)
assert result == gdict
def test_idd2grouplist():
data = (
(
"./eppy/tests/EPlusInterfaceFunctions_tests/integration/iddgroups.idd",
[
(None, "Lead Input"),
(None, "Simulation Data"),
("G1", "Version"),
("G1", "Version1"),
("G1", "Version2"),
("G2", "VersionG"),
("G2", "VersionG1"),
("G2", "VersionG2"),
],
),
)
for fname, glist in data:
result = iddgroups.idd2grouplist(fname)
assert result == glist
| true
| true
|
1c481da32dcbefeebfd9b534a8ab1eb08416bc0c
| 1,059
|
py
|
Python
|
datasets/charades.py
|
rohitgirdhar/ActionVLAD
|
0d87b69d3069db3fe521923675e353f755c5d765
|
[
"Apache-2.0"
] | 228
|
2017-05-08T04:50:49.000Z
|
2021-12-23T10:57:52.000Z
|
datasets/charades.py
|
icyzhang0923/ActionVLAD
|
08d3d65301940991e0a0cdca2c0534edf6749f41
|
[
"Apache-2.0"
] | 39
|
2017-05-12T05:23:46.000Z
|
2021-03-21T03:40:02.000Z
|
datasets/charades.py
|
icyzhang0923/ActionVLAD
|
08d3d65301940991e0a0cdca2c0534edf6749f41
|
[
"Apache-2.0"
] | 84
|
2017-05-08T01:11:25.000Z
|
2021-06-03T09:37:36.000Z
|
# ------------------------------------------------------------------------------
# ActionVLAD: Learning spatio-temporal aggregation for action classification
# Copyright (c) 2017 Carnegie Mellon University and Adobe Systems Incorporated
# Please see LICENSE on https://github.com/rohitgirdhar/ActionVLAD/ for details
# ------------------------------------------------------------------------------
"""Provides data for the Charades dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datasets.video_data_utils import gen_dataset
def get_split(split_name, dataset_dir, dataset_list_dir='', file_pattern=None,
reader=None, modality='rgb', num_samples=1,
split_id=1):
_NUM_CLASSES = 157
_LIST_FN = lambda split, id: \
'%s/%s_split%d.txt' % (dataset_list_dir, split, id)
return gen_dataset(split_name, dataset_dir, file_pattern,
reader, modality, num_samples, split_id,
_NUM_CLASSES, _LIST_FN)
| 39.222222
| 80
| 0.614731
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datasets.video_data_utils import gen_dataset
def get_split(split_name, dataset_dir, dataset_list_dir='', file_pattern=None,
reader=None, modality='rgb', num_samples=1,
split_id=1):
_NUM_CLASSES = 157
_LIST_FN = lambda split, id: \
'%s/%s_split%d.txt' % (dataset_list_dir, split, id)
return gen_dataset(split_name, dataset_dir, file_pattern,
reader, modality, num_samples, split_id,
_NUM_CLASSES, _LIST_FN)
| true
| true
|
1c481dc560ad3b371654f3a94b66f2aba9d8f20a
| 7,385
|
py
|
Python
|
python/ccxt/yobit.py
|
yevsev/ccxt
|
7200521a005a2ddc23efe7bd003628b8e8b955dd
|
[
"MIT"
] | null | null | null |
python/ccxt/yobit.py
|
yevsev/ccxt
|
7200521a005a2ddc23efe7bd003628b8e8b955dd
|
[
"MIT"
] | null | null | null |
python/ccxt/yobit.py
|
yevsev/ccxt
|
7200521a005a2ddc23efe7bd003628b8e8b955dd
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.liqui import liqui
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import DDoSProtection
class yobit (liqui):
def describe(self):
return self.deep_extend(super(yobit, self).describe(), {
'id': 'yobit',
'name': 'YoBit',
'countries': 'RU',
'rateLimit': 3000, # responses are cached every 2 seconds
'version': '3',
'has': {
'createDepositAddress': True,
'fetchDepositAddress': True,
'CORS': False,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766910-cdcbfdae-5eea-11e7-9859-03fea873272d.jpg',
'api': {
'public': 'https://yobit.net/api',
'private': 'https://yobit.net/tapi',
},
'www': 'https://www.yobit.net',
'doc': 'https://www.yobit.net/en/api/',
'fees': 'https://www.yobit.net/en/fees/',
},
'api': {
'public': {
'get': [
'depth/{pair}',
'info',
'ticker/{pair}',
'trades/{pair}',
],
},
'private': {
'post': [
'ActiveOrders',
'CancelOrder',
'GetDepositAddress',
'getInfo',
'OrderInfo',
'Trade',
'TradeHistory',
'WithdrawCoinsToAddress',
],
},
},
'fees': {
'trading': {
'maker': 0.002,
'taker': 0.002,
},
'funding': {
'withdraw': {},
},
},
'commonCurrencies': {
'AIR': 'AirCoin',
'ANI': 'ANICoin',
'ANT': 'AntsCoin',
'AST': 'Astral',
'ATM': 'Autumncoin',
'BCC': 'BCH',
'BCS': 'BitcoinStake',
'BLN': 'Bulleon',
'BTS': 'Bitshares2',
'CAT': 'BitClave',
'COV': 'Coven Coin',
'CPC': 'Capricoin',
'CS': 'CryptoSpots',
'DCT': 'Discount',
'DGD': 'DarkGoldCoin',
'DROP': 'FaucetCoin',
'ERT': 'Eristica Token',
'ICN': 'iCoin',
'KNC': 'KingN Coin',
'LIZI': 'LiZi',
'LOC': 'LocoCoin',
'LOCX': 'LOC',
'LUN': 'LunarCoin',
'MDT': 'Midnight',
'NAV': 'NavajoCoin',
'OMG': 'OMGame',
'STK': 'StakeCoin',
'PAY': 'EPAY',
'PLC': 'Platin Coin',
'REP': 'Republicoin',
'RUR': 'RUB',
'XIN': 'XINCoin',
},
'options': {
'fetchOrdersRequiresSymbol': True,
},
})
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'closed',
'2': 'canceled',
'3': 'open', # or partially-filled and closed? https://github.com/ccxt/ccxt/issues/1594
}
if status in statuses:
return statuses[status]
return status
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostGetInfo()
balances = response['return']
result = {'info': balances}
sides = {'free': 'funds', 'total': 'funds_incl_orders'}
keys = list(sides.keys())
for i in range(0, len(keys)):
key = keys[i]
side = sides[key]
if side in balances:
currencies = list(balances[side].keys())
for j in range(0, len(currencies)):
lowercase = currencies[j]
uppercase = lowercase.upper()
currency = self.common_currency_code(uppercase)
account = None
if currency in result:
account = result[currency]
else:
account = self.account()
account[key] = balances[side][lowercase]
if account['total'] and account['free']:
account['used'] = account['total'] - account['free']
result[currency] = account
return self.parse_balance(result)
def create_deposit_address(self, code, params={}):
response = self.fetch_deposit_address(code, self.extend({
'need_new': 1,
}, params))
address = self.safe_string(response, 'address')
self.check_address(address)
return {
'currency': code,
'address': address,
'status': 'ok',
'info': response['info'],
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'coinName': currency['id'],
'need_new': 0,
}
response = self.privatePostGetDepositAddress(self.extend(request, params))
address = self.safe_string(response['return'], 'address')
self.check_address(address)
return {
'currency': code,
'address': address,
'status': 'ok',
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
currency = self.currency(code)
self.load_markets()
response = self.privatePostWithdrawCoinsToAddress(self.extend({
'coinName': currency['id'],
'amount': amount,
'address': address,
}, params))
return {
'info': response,
'id': None,
}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if 'success' in response:
if not response['success']:
if response['error'].find('Insufficient funds') >= 0: # not enougTh is a typo inside Liqui's own API...
raise InsufficientFunds(self.id + ' ' + self.json(response))
elif response['error'] == 'Requests too often':
raise DDoSProtection(self.id + ' ' + self.json(response))
elif (response['error'] == 'not available') or (response['error'] == 'external service unavailable'):
raise DDoSProtection(self.id + ' ' + self.json(response))
else:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| 36.559406
| 126
| 0.448206
|
t liqui
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import DDoSProtection
class yobit (liqui):
def describe(self):
return self.deep_extend(super(yobit, self).describe(), {
'id': 'yobit',
'name': 'YoBit',
'countries': 'RU',
'rateLimit': 3000,
'version': '3',
'has': {
'createDepositAddress': True,
'fetchDepositAddress': True,
'CORS': False,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766910-cdcbfdae-5eea-11e7-9859-03fea873272d.jpg',
'api': {
'public': 'https://yobit.net/api',
'private': 'https://yobit.net/tapi',
},
'www': 'https://www.yobit.net',
'doc': 'https://www.yobit.net/en/api/',
'fees': 'https://www.yobit.net/en/fees/',
},
'api': {
'public': {
'get': [
'depth/{pair}',
'info',
'ticker/{pair}',
'trades/{pair}',
],
},
'private': {
'post': [
'ActiveOrders',
'CancelOrder',
'GetDepositAddress',
'getInfo',
'OrderInfo',
'Trade',
'TradeHistory',
'WithdrawCoinsToAddress',
],
},
},
'fees': {
'trading': {
'maker': 0.002,
'taker': 0.002,
},
'funding': {
'withdraw': {},
},
},
'commonCurrencies': {
'AIR': 'AirCoin',
'ANI': 'ANICoin',
'ANT': 'AntsCoin',
'AST': 'Astral',
'ATM': 'Autumncoin',
'BCC': 'BCH',
'BCS': 'BitcoinStake',
'BLN': 'Bulleon',
'BTS': 'Bitshares2',
'CAT': 'BitClave',
'COV': 'Coven Coin',
'CPC': 'Capricoin',
'CS': 'CryptoSpots',
'DCT': 'Discount',
'DGD': 'DarkGoldCoin',
'DROP': 'FaucetCoin',
'ERT': 'Eristica Token',
'ICN': 'iCoin',
'KNC': 'KingN Coin',
'LIZI': 'LiZi',
'LOC': 'LocoCoin',
'LOCX': 'LOC',
'LUN': 'LunarCoin',
'MDT': 'Midnight',
'NAV': 'NavajoCoin',
'OMG': 'OMGame',
'STK': 'StakeCoin',
'PAY': 'EPAY',
'PLC': 'Platin Coin',
'REP': 'Republicoin',
'RUR': 'RUB',
'XIN': 'XINCoin',
},
'options': {
'fetchOrdersRequiresSymbol': True,
},
})
def parse_order_status(self, status):
statuses = {
'0': 'open',
'1': 'closed',
'2': 'canceled',
'3': 'open',
}
if status in statuses:
return statuses[status]
return status
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostGetInfo()
balances = response['return']
result = {'info': balances}
sides = {'free': 'funds', 'total': 'funds_incl_orders'}
keys = list(sides.keys())
for i in range(0, len(keys)):
key = keys[i]
side = sides[key]
if side in balances:
currencies = list(balances[side].keys())
for j in range(0, len(currencies)):
lowercase = currencies[j]
uppercase = lowercase.upper()
currency = self.common_currency_code(uppercase)
account = None
if currency in result:
account = result[currency]
else:
account = self.account()
account[key] = balances[side][lowercase]
if account['total'] and account['free']:
account['used'] = account['total'] - account['free']
result[currency] = account
return self.parse_balance(result)
def create_deposit_address(self, code, params={}):
response = self.fetch_deposit_address(code, self.extend({
'need_new': 1,
}, params))
address = self.safe_string(response, 'address')
self.check_address(address)
return {
'currency': code,
'address': address,
'status': 'ok',
'info': response['info'],
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'coinName': currency['id'],
'need_new': 0,
}
response = self.privatePostGetDepositAddress(self.extend(request, params))
address = self.safe_string(response['return'], 'address')
self.check_address(address)
return {
'currency': code,
'address': address,
'status': 'ok',
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
currency = self.currency(code)
self.load_markets()
response = self.privatePostWithdrawCoinsToAddress(self.extend({
'coinName': currency['id'],
'amount': amount,
'address': address,
}, params))
return {
'info': response,
'id': None,
}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if 'success' in response:
if not response['success']:
if response['error'].find('Insufficient funds') >= 0:
raise InsufficientFunds(self.id + ' ' + self.json(response))
elif response['error'] == 'Requests too often':
raise DDoSProtection(self.id + ' ' + self.json(response))
elif (response['error'] == 'not available') or (response['error'] == 'external service unavailable'):
raise DDoSProtection(self.id + ' ' + self.json(response))
else:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| true
| true
|
1c481e042f3cc47722722f39903e65876d4a4f00
| 979
|
py
|
Python
|
pyleecan/Methods/Mesh/ScalarProductL2/scalar_product.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 95
|
2019-01-23T04:19:45.000Z
|
2022-03-17T18:22:10.000Z
|
pyleecan/Methods/Mesh/ScalarProductL2/scalar_product.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 366
|
2019-02-20T07:15:08.000Z
|
2022-03-31T13:37:23.000Z
|
pyleecan/Methods/Mesh/ScalarProductL2/scalar_product.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 74
|
2019-01-24T01:47:31.000Z
|
2022-02-25T05:44:42.000Z
|
# -*- coding: utf-8 -*-
import numpy as np
def scalar_product(self, funca, funcb, detJ, weights, nb_gauss_points):
"""Scalar product of shape functions with L2 gauss integration
Parameters
----------
self : ScalarProductL2
a ScalarProductL2 object
funca : ndarray
vertice of the cell
nba : ndarray
coordinates of a point
funcb : ndarray
vertice of the cell
nbb : ndarray
coordinates of a point
detJ : ndarray
jacobian determinant evaluated for each gauss point
weights : ndarray
gauss weights
nb_gauss_points : int
number of gauss points
Returns
-------
l2_scal : ndarray
a L2 scalar product
"""
func_a_w_dJ = np.zeros(funca.shape)
for i in range(nb_gauss_points):
func_a_w_dJ[i, :] = funca[i, :] * weights[i] * detJ[i]
l2_scal_mat = np.squeeze(np.tensordot(func_a_w_dJ, funcb, axes=([0], [0])))
return l2_scal_mat
| 23.878049
| 79
| 0.622063
|
import numpy as np
def scalar_product(self, funca, funcb, detJ, weights, nb_gauss_points):
func_a_w_dJ = np.zeros(funca.shape)
for i in range(nb_gauss_points):
func_a_w_dJ[i, :] = funca[i, :] * weights[i] * detJ[i]
l2_scal_mat = np.squeeze(np.tensordot(func_a_w_dJ, funcb, axes=([0], [0])))
return l2_scal_mat
| true
| true
|
1c481e4bd46ecaaf5421792b882173bf343bf1cb
| 12,093
|
py
|
Python
|
tests/python/pants_test/backend/project_info/tasks/test_export_integration.py
|
sammy-1234/pants
|
889016952a248cf229c78c014d9f6c95422d98b8
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/backend/project_info/tasks/test_export_integration.py
|
sammy-1234/pants
|
889016952a248cf229c78c014d9f6c95422d98b8
|
[
"Apache-2.0"
] | null | null | null |
tests/python/pants_test/backend/project_info/tasks/test_export_integration.py
|
sammy-1234/pants
|
889016952a248cf229c78c014d9f6c95422d98b8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import os
import re
import subprocess
from twitter.common.collections import maybe_list
from pants.base.build_environment import get_buildroot
from pants.build_graph.intermediate_target_factory import hash_target
from pants_test.backend.project_info.tasks.resolve_jars_test_mixin import ResolveJarsTestMixin
from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_resolver
class ExportIntegrationTest(ResolveJarsTestMixin, PantsRunIntegrationTest):
  """Integration tests for the `./pants export` console task.

  Each test runs a real pants subprocess against checked-in example/test
  projects and asserts on the JSON document the export goal writes out.
  """

  # Extra confs to resolve when external libraries are requested: source jars
  # and javadoc jars in addition to the default binary jars.
  _confs_args = [
    '--export-libraries-sources',
    '--export-libraries-javadocs',
  ]

  def run_export(self, test_target, workdir, load_libs=False, only_default=False, extra_args=None):
    """Runs ./pants export ... and returns its json output.

    :param string|list test_target: spec of the targets to run on.
    :param string workdir: working directory to run pants with.
    :param bool load_libs: whether to load external libraries (of any conf).
    :param bool only_default: if loading libraries, whether to only resolve the default conf, or to
      additionally resolve sources and javadocs.
    :param list extra_args: list of extra arguments for the pants invocation.
    :return: the json output of the console task.
    :rtype: dict
    """
    export_out_file = os.path.join(workdir, 'export_out.txt')
    args = ['export',
            '--output-file={out_file}'.format(out_file=export_out_file)] + maybe_list(test_target)
    libs_args = ['--no-export-libraries'] if not load_libs else self._confs_args
    if load_libs and only_default:
      libs_args = []
    pants_run = self.run_pants_with_workdir(args + libs_args + (extra_args or []), workdir)
    self.assert_success(pants_run)
    # The export goal writes its JSON to --output-file rather than stdout.
    self.assertTrue(os.path.exists(export_out_file),
                    msg='Could not find export output file in {out_file}'
                    .format(out_file=export_out_file))
    with open(export_out_file, 'r') as json_file:
      json_data = json.load(json_file)
    if not load_libs:
      # Without any --export-libraries-* flag the 'libraries' key must be absent.
      self.assertIsNone(json_data.get('libraries'))
    return json_data

  def evaluate_subtask(self, targets, workdir, load_extra_confs, extra_args, expected_jars):
    """ResolveJarsTestMixin hook: export with libraries, then verify every
    expected jar is listed and that each resolved path exists on disk."""
    json_data = self.run_export(targets, workdir, load_libs=True, only_default=not load_extra_confs,
                                extra_args=extra_args)
    for jar in expected_jars:
      self.assertIn(jar, json_data['libraries'])
      for path in json_data['libraries'][jar].values():
        self.assertTrue(os.path.exists(path), 'Expected jar at {} to actually exist.'.format(path))

  @ensure_resolver
  def test_export_code_gen(self):
    """Synthetic (thrift codegen) targets show up in the export, rooted under the workdir."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      thrift_target_name = ('examples.src.thrift.org.pantsbuild.example.precipitation'
                            '.precipitation-java')
      # Codegen paths contain unpredictable hash segments, so match by regex.
      codegen_target_regex = os.path.join(os.path.relpath(workdir, get_buildroot()),
                                          'gen/thrift-java/[^/]*/[^/:]*/[^/:]*:{0}'.format(thrift_target_name))
      p = re.compile(codegen_target_regex)
      self.assertTrue(any(p.match(target) for target in json_data.get('targets').keys()))

  @ensure_resolver
  def test_export_json_transitive_jar(self):
    """Transitive third-party deps (hamcrest via junit) appear in a target's libraries."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      targets = json_data.get('targets')
      self.assertIn('org.hamcrest:hamcrest-core:1.3', targets[test_target]['libraries'])

  @ensure_resolver
  def test_export_jar_path_with_excludes(self):
    """Excluded jars are dropped from 'libraries' and recorded in the target's 'excludes'."""
    with self.temporary_workdir() as workdir:
      test_target = 'testprojects/src/java/org/pantsbuild/testproject/exclude:foo'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      self.assertIsNone(json_data
                        .get('libraries')
                        .get('com.typesafe.sbt:incremental-compiler:0.13.7'))
      foo_target = (json_data
                    .get('targets')
                    .get('testprojects/src/java/org/pantsbuild/testproject/exclude:foo'))
      self.assertTrue('com.typesafe.sbt:incremental-compiler' in foo_target.get('excludes'))

  @ensure_resolver
  def test_export_jar_path_with_excludes_soft(self):
    """With --export-soft-excludes, excluded jars are still resolved but remain listed as excludes."""
    with self.temporary_workdir() as workdir:
      test_target = 'testprojects/src/java/org/pantsbuild/testproject/exclude:'
      json_data = self.run_export(test_target,
                                  workdir,
                                  load_libs=True,
                                  extra_args=['--export-soft-excludes'])
      self.assertIsNotNone(json_data
                           .get('libraries')
                           .get('com.martiansoftware:nailgun-server:0.9.1'))
      self.assertIsNotNone(json_data.get('libraries').get('org.pantsbuild:jmake:1.3.8-10'))
      foo_target = (json_data
                    .get('targets')
                    .get('testprojects/src/java/org/pantsbuild/testproject/exclude:foo'))
      self.assertTrue('com.typesafe.sbt:incremental-compiler' in foo_target.get('excludes'))
      self.assertTrue('org.pantsbuild' in foo_target.get('excludes'))

  @ensure_resolver
  def test_export_jar_path(self):
    """Default, javadoc, and sources jars are all resolved for a library."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      common_lang_lib_info = json_data.get('libraries').get('junit:junit:4.12')
      self.assertIsNotNone(common_lang_lib_info)
      self.assertIn(
        'junit-4.12.jar',
        common_lang_lib_info.get('default')
      )
      self.assertIn(
        'junit-4.12-javadoc.jar',
        common_lang_lib_info.get('javadoc')
      )
      self.assertIn(
        'junit-4.12-sources.jar',
        common_lang_lib_info.get('sources')
      )

  @ensure_resolver
  def test_dep_map_for_java_sources(self):
    """java_sources referenced by a scala target are included in the exported target map."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/src/scala/org/pantsbuild/example/scala_with_java_sources'
      json_data = self.run_export(test_target, workdir)
      targets = json_data.get('targets')
      self.assertIn('examples/src/java/org/pantsbuild/example/java_sources:java_sources', targets)

  @ensure_resolver
  def test_sources_and_javadocs(self):
    """Scala third-party jars resolve all three confs (default/sources/javadoc)."""
    with self.temporary_workdir() as workdir:
      test_target = 'testprojects/src/scala/org/pantsbuild/testproject/unicode/shapeless'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      shapeless_lib = json_data.get('libraries').get('com.chuusai:shapeless_2.12:2.3.2')
      self.assertIsNotNone(shapeless_lib)
      self.assertIsNotNone(shapeless_lib['default'])
      self.assertIsNotNone(shapeless_lib['sources'])
      self.assertIsNotNone(shapeless_lib['javadoc'])

  @ensure_resolver
  def test_classifiers(self):
    """Ivy classifiers (e.g. 'tests') become separate conf entries for the library."""
    with self.temporary_workdir() as workdir:
      test_target = 'testprojects/tests/java/org/pantsbuild/testproject/ivyclassifier:ivyclassifier'
      json_data = self.run_export(test_target, workdir, load_libs=True)
      avro_lib_info = json_data.get('libraries').get('org.apache.avro:avro:1.7.7')
      self.assertIsNotNone(avro_lib_info)
      self.assertIn(
        'avro-1.7.7.jar',
        avro_lib_info.get('default'),
      )
      self.assertIn(
        'avro-1.7.7-tests.jar',
        avro_lib_info.get('tests'),
      )
      self.assertIn(
        'avro-1.7.7-javadoc.jar',
        avro_lib_info.get('javadoc'),
      )
      self.assertIn(
        'avro-1.7.7-sources.jar',
        avro_lib_info.get('sources'),
      )

  @ensure_resolver
  def test_distributions_and_platforms(self):
    """JVM platform/distribution options are echoed back in the export output."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/src/java/org/pantsbuild/example/hello/simple'
      json_data = self.run_export(test_target, workdir, load_libs=False, extra_args=[
        '--jvm-platform-default-platform=java7',
        '--jvm-platform-platforms={'
        ' "java7": {"source": "1.7", "target": "1.7", "args": [ "-X123" ]},'
        ' "java8": {"source": "1.8", "target": "1.8", "args": [ "-X456" ]}'
        '}',
        '--jvm-distributions-paths={'
        ' "macos": [ "/Library/JDK" ],'
        ' "linux": [ "/usr/lib/jdk7", "/usr/lib/jdk8"]'
        '}'
      ])
      # A JVM-only export should not carry the python interpreter section.
      self.assertFalse('python_setup' in json_data)
      target_name = 'examples/src/java/org/pantsbuild/example/hello/simple:simple'
      targets = json_data.get('targets')
      self.assertEqual('java7', targets[target_name]['platform'])
      self.assertEqual(
        {
          'default_platform' : 'java7',
          'platforms': {
            'java7': {
              'source_level': '1.7',
              'args': ['-X123'],
              'target_level': '1.7'},
            'java8': {
              'source_level': '1.8',
              'args': ['-X456'],
              'target_level': '1.8'},
          }
        },
        json_data['jvm_platforms'])

  @ensure_resolver
  def test_test_platform(self):
    """A target may declare a test_platform distinct from its compile platform."""
    with self.temporary_workdir() as workdir:
      test_target = 'testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight-test-platform'
      json_data = self.run_export(test_target, workdir)
      self.assertEqual('java7', json_data['targets'][test_target]['platform'])
      self.assertEqual('java8', json_data['targets'][test_target]['test_platform'])

  @ensure_resolver
  def test_intellij_integration(self):
    """The IntelliJ export wrapper script produces python_setup/interpreter info."""
    with self.temporary_workdir() as workdir:
      exported_file = os.path.join(workdir, "export_file.json")
      p = subprocess.Popen(['build-support/pants-intellij.sh', '--export-output-file=' + exported_file],
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      p.communicate()
      self.assertEqual(p.returncode, 0)

      with open(exported_file, 'r') as data_file:
        json_data = json.load(data_file)

      python_setup = json_data['python_setup']
      self.assertIsNotNone(python_setup)
      self.assertIsNotNone(python_setup['interpreters'])

      default_interpreter = python_setup['default_interpreter']
      self.assertIsNotNone(default_interpreter)
      self.assertIsNotNone(python_setup['interpreters'][default_interpreter])
      # Both the interpreter binary and its chroot must exist on disk.
      self.assertTrue(os.path.exists(python_setup['interpreters'][default_interpreter]['binary']))
      self.assertTrue(os.path.exists(python_setup['interpreters'][default_interpreter]['chroot']))

      python_target = json_data['targets']['src/python/pants/backend/python/targets:targets']
      self.assertIsNotNone(python_target)
      self.assertEqual(default_interpreter, python_target['python_interpreter'])

  @ensure_resolver
  def test_intransitive_and_scope(self):
    """Synthetic 'provided' targets export transitive=False and their dual scope."""
    with self.temporary_workdir() as workdir:
      test_path = 'testprojects/maven_layout/provided_patching/one/src/main/java'
      test_target = '{}:common'.format(test_path)
      json_data = self.run_export(test_target, workdir)
      # The synthetic target's name embeds a hash of (spec, scope).
      h = hash_target('{}:shadow'.format(test_path), 'provided')
      synthetic_target = '{}:shadow-unstable-provided-{}'.format(test_path, h)
      self.assertEqual(False, json_data['targets'][synthetic_target]['transitive'])
      self.assertEqual('compile test', json_data['targets'][synthetic_target]['scope'])

  @ensure_resolver
  def test_export_is_target_roots(self):
    """is_target_root is True exactly for targets matched by the command-line spec."""
    with self.temporary_workdir() as workdir:
      test_target = 'examples/tests/java/org/pantsbuild/example/::'
      json_data = self.run_export(test_target, workdir, load_libs=False)
      for target_address, attributes in json_data['targets'].items():
        # Make sure all targets under `test_target`'s directory are target roots.
        self.assertEqual(
          attributes['is_target_root'],
          target_address.startswith("examples/tests/java/org/pantsbuild/example")
        )
| 45.633962
| 111
| 0.678326
|
import json
import os
import re
import subprocess
from twitter.common.collections import maybe_list
from pants.base.build_environment import get_buildroot
from pants.build_graph.intermediate_target_factory import hash_target
from pants_test.backend.project_info.tasks.resolve_jars_test_mixin import ResolveJarsTestMixin
from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_resolver
class ExportIntegrationTest(ResolveJarsTestMixin, PantsRunIntegrationTest):
_confs_args = [
'--export-libraries-sources',
'--export-libraries-javadocs',
]
def run_export(self, test_target, workdir, load_libs=False, only_default=False, extra_args=None):
export_out_file = os.path.join(workdir, 'export_out.txt')
args = ['export',
'--output-file={out_file}'.format(out_file=export_out_file)] + maybe_list(test_target)
libs_args = ['--no-export-libraries'] if not load_libs else self._confs_args
if load_libs and only_default:
libs_args = []
pants_run = self.run_pants_with_workdir(args + libs_args + (extra_args or []), workdir)
self.assert_success(pants_run)
self.assertTrue(os.path.exists(export_out_file),
msg='Could not find export output file in {out_file}'
.format(out_file=export_out_file))
with open(export_out_file, 'r') as json_file:
json_data = json.load(json_file)
if not load_libs:
self.assertIsNone(json_data.get('libraries'))
return json_data
def evaluate_subtask(self, targets, workdir, load_extra_confs, extra_args, expected_jars):
json_data = self.run_export(targets, workdir, load_libs=True, only_default=not load_extra_confs,
extra_args=extra_args)
for jar in expected_jars:
self.assertIn(jar, json_data['libraries'])
for path in json_data['libraries'][jar].values():
self.assertTrue(os.path.exists(path), 'Expected jar at {} to actually exist.'.format(path))
@ensure_resolver
def test_export_code_gen(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
json_data = self.run_export(test_target, workdir, load_libs=True)
thrift_target_name = ('examples.src.thrift.org.pantsbuild.example.precipitation'
'.precipitation-java')
codegen_target_regex = os.path.join(os.path.relpath(workdir, get_buildroot()),
'gen/thrift-java/[^/]*/[^/:]*/[^/:]*:{0}'.format(thrift_target_name))
p = re.compile(codegen_target_regex)
self.assertTrue(any(p.match(target) for target in json_data.get('targets').keys()))
@ensure_resolver
def test_export_json_transitive_jar(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
json_data = self.run_export(test_target, workdir, load_libs=True)
targets = json_data.get('targets')
self.assertIn('org.hamcrest:hamcrest-core:1.3', targets[test_target]['libraries'])
@ensure_resolver
def test_export_jar_path_with_excludes(self):
with self.temporary_workdir() as workdir:
test_target = 'testprojects/src/java/org/pantsbuild/testproject/exclude:foo'
json_data = self.run_export(test_target, workdir, load_libs=True)
self.assertIsNone(json_data
.get('libraries')
.get('com.typesafe.sbt:incremental-compiler:0.13.7'))
foo_target = (json_data
.get('targets')
.get('testprojects/src/java/org/pantsbuild/testproject/exclude:foo'))
self.assertTrue('com.typesafe.sbt:incremental-compiler' in foo_target.get('excludes'))
@ensure_resolver
def test_export_jar_path_with_excludes_soft(self):
with self.temporary_workdir() as workdir:
test_target = 'testprojects/src/java/org/pantsbuild/testproject/exclude:'
json_data = self.run_export(test_target,
workdir,
load_libs=True,
extra_args=['--export-soft-excludes'])
self.assertIsNotNone(json_data
.get('libraries')
.get('com.martiansoftware:nailgun-server:0.9.1'))
self.assertIsNotNone(json_data.get('libraries').get('org.pantsbuild:jmake:1.3.8-10'))
foo_target = (json_data
.get('targets')
.get('testprojects/src/java/org/pantsbuild/testproject/exclude:foo'))
self.assertTrue('com.typesafe.sbt:incremental-compiler' in foo_target.get('excludes'))
self.assertTrue('org.pantsbuild' in foo_target.get('excludes'))
@ensure_resolver
def test_export_jar_path(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/tests/java/org/pantsbuild/example/usethrift:usethrift'
json_data = self.run_export(test_target, workdir, load_libs=True)
common_lang_lib_info = json_data.get('libraries').get('junit:junit:4.12')
self.assertIsNotNone(common_lang_lib_info)
self.assertIn(
'junit-4.12.jar',
common_lang_lib_info.get('default')
)
self.assertIn(
'junit-4.12-javadoc.jar',
common_lang_lib_info.get('javadoc')
)
self.assertIn(
'junit-4.12-sources.jar',
common_lang_lib_info.get('sources')
)
@ensure_resolver
def test_dep_map_for_java_sources(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/src/scala/org/pantsbuild/example/scala_with_java_sources'
json_data = self.run_export(test_target, workdir)
targets = json_data.get('targets')
self.assertIn('examples/src/java/org/pantsbuild/example/java_sources:java_sources', targets)
@ensure_resolver
def test_sources_and_javadocs(self):
with self.temporary_workdir() as workdir:
test_target = 'testprojects/src/scala/org/pantsbuild/testproject/unicode/shapeless'
json_data = self.run_export(test_target, workdir, load_libs=True)
shapeless_lib = json_data.get('libraries').get('com.chuusai:shapeless_2.12:2.3.2')
self.assertIsNotNone(shapeless_lib)
self.assertIsNotNone(shapeless_lib['default'])
self.assertIsNotNone(shapeless_lib['sources'])
self.assertIsNotNone(shapeless_lib['javadoc'])
@ensure_resolver
def test_classifiers(self):
with self.temporary_workdir() as workdir:
test_target = 'testprojects/tests/java/org/pantsbuild/testproject/ivyclassifier:ivyclassifier'
json_data = self.run_export(test_target, workdir, load_libs=True)
avro_lib_info = json_data.get('libraries').get('org.apache.avro:avro:1.7.7')
self.assertIsNotNone(avro_lib_info)
self.assertIn(
'avro-1.7.7.jar',
avro_lib_info.get('default'),
)
self.assertIn(
'avro-1.7.7-tests.jar',
avro_lib_info.get('tests'),
)
self.assertIn(
'avro-1.7.7-javadoc.jar',
avro_lib_info.get('javadoc'),
)
self.assertIn(
'avro-1.7.7-sources.jar',
avro_lib_info.get('sources'),
)
@ensure_resolver
def test_distributions_and_platforms(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/src/java/org/pantsbuild/example/hello/simple'
json_data = self.run_export(test_target, workdir, load_libs=False, extra_args=[
'--jvm-platform-default-platform=java7',
'--jvm-platform-platforms={'
' "java7": {"source": "1.7", "target": "1.7", "args": [ "-X123" ]},'
' "java8": {"source": "1.8", "target": "1.8", "args": [ "-X456" ]}'
'}',
'--jvm-distributions-paths={'
' "macos": [ "/Library/JDK" ],'
' "linux": [ "/usr/lib/jdk7", "/usr/lib/jdk8"]'
'}'
])
self.assertFalse('python_setup' in json_data)
target_name = 'examples/src/java/org/pantsbuild/example/hello/simple:simple'
targets = json_data.get('targets')
self.assertEqual('java7', targets[target_name]['platform'])
self.assertEqual(
{
'default_platform' : 'java7',
'platforms': {
'java7': {
'source_level': '1.7',
'args': ['-X123'],
'target_level': '1.7'},
'java8': {
'source_level': '1.8',
'args': ['-X456'],
'target_level': '1.8'},
}
},
json_data['jvm_platforms'])
@ensure_resolver
def test_test_platform(self):
with self.temporary_workdir() as workdir:
test_target = 'testprojects/tests/java/org/pantsbuild/testproject/testjvms:eight-test-platform'
json_data = self.run_export(test_target, workdir)
self.assertEqual('java7', json_data['targets'][test_target]['platform'])
self.assertEqual('java8', json_data['targets'][test_target]['test_platform'])
@ensure_resolver
def test_intellij_integration(self):
with self.temporary_workdir() as workdir:
exported_file = os.path.join(workdir, "export_file.json")
p = subprocess.Popen(['build-support/pants-intellij.sh', '--export-output-file=' + exported_file],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
self.assertEqual(p.returncode, 0)
with open(exported_file, 'r') as data_file:
json_data = json.load(data_file)
python_setup = json_data['python_setup']
self.assertIsNotNone(python_setup)
self.assertIsNotNone(python_setup['interpreters'])
default_interpreter = python_setup['default_interpreter']
self.assertIsNotNone(default_interpreter)
self.assertIsNotNone(python_setup['interpreters'][default_interpreter])
self.assertTrue(os.path.exists(python_setup['interpreters'][default_interpreter]['binary']))
self.assertTrue(os.path.exists(python_setup['interpreters'][default_interpreter]['chroot']))
python_target = json_data['targets']['src/python/pants/backend/python/targets:targets']
self.assertIsNotNone(python_target)
self.assertEqual(default_interpreter, python_target['python_interpreter'])
@ensure_resolver
def test_intransitive_and_scope(self):
with self.temporary_workdir() as workdir:
test_path = 'testprojects/maven_layout/provided_patching/one/src/main/java'
test_target = '{}:common'.format(test_path)
json_data = self.run_export(test_target, workdir)
h = hash_target('{}:shadow'.format(test_path), 'provided')
synthetic_target = '{}:shadow-unstable-provided-{}'.format(test_path, h)
self.assertEqual(False, json_data['targets'][synthetic_target]['transitive'])
self.assertEqual('compile test', json_data['targets'][synthetic_target]['scope'])
@ensure_resolver
def test_export_is_target_roots(self):
with self.temporary_workdir() as workdir:
test_target = 'examples/tests/java/org/pantsbuild/example/::'
json_data = self.run_export(test_target, workdir, load_libs=False)
for target_address, attributes in json_data['targets'].items():
self.assertEqual(
attributes['is_target_root'],
target_address.startswith("examples/tests/java/org/pantsbuild/example")
)
| true
| true
|
1c481e629927a83e32294493a0c2118b75b7d1f9
| 12,288
|
py
|
Python
|
openpirates-api.py
|
dhagell/openpirates-api
|
68e4d626c2fae15d5e95427302e2a97db306591c
|
[
"MIT"
] | null | null | null |
openpirates-api.py
|
dhagell/openpirates-api
|
68e4d626c2fae15d5e95427302e2a97db306591c
|
[
"MIT"
] | null | null | null |
openpirates-api.py
|
dhagell/openpirates-api
|
68e4d626c2fae15d5e95427302e2a97db306591c
|
[
"MIT"
] | null | null | null |
import mimetypes
import os

from flask import Flask, abort
from flask import jsonify
from flask import render_template
from google.cloud import storage
from google.oauth2 import service_account
from PIL import Image
# GCS project/bucket used to host generated token images; both must be set in
# the environment before the app starts (KeyError otherwise).
GOOGLE_STORAGE_PROJECT = os.environ['GOOGLE_STORAGE_PROJECT']
GOOGLE_STORAGE_BUCKET = os.environ['GOOGLE_STORAGE_BUCKET']

app = Flask(__name__)

########################################################################
# Data
########################################################################

# opensea-pirates
# Trait tables: every pirate attribute is picked deterministically as
# table[token_id % len(table)], so a given token always renders the same.
FIRST_NAMES = ['Herbie', 'Sprinkles', 'Boris', 'Dave', 'Randy', 'Captain']
LAST_NAMES = ['Starbelly', 'Fisherton', 'McCoy']
BASES = ['jellyfish', 'starfish', 'crab', 'narwhal', 'tealfish', 'goldfish']
EYES = ['big', 'joy', 'wink', 'sleepy', 'content']
MOUTH = ['happy', 'surprised', 'pleased', 'cute']
HAT = ['tricorn', 'bicorn', 'small', 'scarf','cap']
SWORD = ['cutlass', 'saber', 'scimitar', 'dagger']
GLYPHS = ['parrot', 'frog', 'snake']
GREEKSYMBOL = ['omega', 'alpha', 'beta']

INT_ATTRIBUTES = [5, 2, 3, 4, 8]
FLOAT_ATTRIBUTES = [1.4, 2.3, 11.7, 90.2, 1.2]
STR_ATTRIBUTES = [
    'Happy',
    'Sad',
    'Sleepy',
    'Boring'
]
BOOST_ATTRIBUTES = [10, 40, 30]
PERCENT_BOOST_ATTRIBUTES = [5, 10, 15]
NUMBER_ATTRIBUTES = [1, 2, 1, 1]

# opensea-pirates-treasures
# Treasure tables are indexed directly by token_id (bounds-checked in the
# /api/treasure route), so all of these lists must stay the same length.
ACCESSORIES_IMAGES = [
    'Bamboo-flute.png',
    'Life-ring.png',
    'Message-in-a-bottle.png',
    'Pearl.png',
    'Scuba-mask.png',
    'Trident.png'
]
# Human-readable names derived from the image filenames.
ACCESSORIES_NAMES = [a.replace('-', ' ').replace('.png', '')
                     for a in ACCESSORIES_IMAGES]
ACCESSORIES_ATTS_INT = [200, 11, 3, 41, 9, 172]
ACCESSORIES_ATTS_PERCENT = [5, 10, 1, 20, 15, 25]
ACCESSORIES_ATTS_LOCATION = ['Head', 'Body', 'Held', 'Held', 'Head', 'Held']
ACCESSORIES_ATTS_RARITY = [
    'Common',
    'Rare',
    'Legendary',
    'Epic',
    'Divine',
    'Hidden'
]
ACCESSORIES_ATTS_DEPTH = [
    'beach',
    'shore',
    'shallows',
    'deeps',
    'shore',
    'deeps'
]
ACCESSORIES_ATTS_GENERATION = [1, 1, 2, 1, 1, 3]

# contractURI() support
CONTRACT_URI_METADATA = {
    'opensea-pirates': {
        'name': 'OpenSea Pirates',
        'description': 'Friendly pirates of the sea.',
        'image': 'https://example.com/image.png',
        'external_link': 'https://github.com/dhagell/opensea-pirates/'
    },
    'opensea-erc1155': {
        'name': 'OpenSea Pirate Treasures',
        'description': "Fun and useful treasures for your OpenSea pirates.",
        'image': 'https://example.com/image.png',
        'external_link': 'https://github.com/dhagell/opensea-erc1155/'
    }
}
CONTRACT_URI_METADATA_AVAILABLE = CONTRACT_URI_METADATA.keys()
########################################################################
# Routes
########################################################################
# opensea-pirates
@app.route('/api/pirate/<token_id>')
def pirate(token_id):
    """ERC-721 metadata for a single pirate, derived deterministically from its id."""
    tid = int(token_id)

    # Name and visual layers cycle through the fixed trait tables.
    pirate_name = '%s %s' % (FIRST_NAMES[tid % len(FIRST_NAMES)],
                             LAST_NAMES[tid % len(LAST_NAMES)])
    layers = [
        'images/bases/base-%s.png' % BASES[tid % len(BASES)],
        'images/eyes/eyes-%s.png' % EYES[tid % len(EYES)],
        'images/mouths/mouth-%s.png' % MOUTH[tid % len(MOUTH)],
    ]
    image_url = _compose_image(layers, tid)

    attributes = []
    # Plain traits first, then the ones that need a display_type hint.
    for trait_name, table in (('Base', BASES),
                              ('Eyes', EYES),
                              ('Mouth', MOUTH),
                              ('Level', INT_ATTRIBUTES),
                              ('Stamina', FLOAT_ATTRIBUTES),
                              ('Personality', STR_ATTRIBUTES)):
        _add_attribute(attributes, trait_name, table, tid)
    _add_attribute(attributes, 'Aqua Power', BOOST_ATTRIBUTES, tid, display_type='boost_number')
    _add_attribute(attributes, 'Stamina Increase', PERCENT_BOOST_ATTRIBUTES, tid, display_type='boost_percentage')
    _add_attribute(attributes, 'Generation', NUMBER_ATTRIBUTES, tid, display_type='number')

    return jsonify({
        'name': pirate_name,
        'description': 'Friendly OpenSea Pirate that enjoys a long sail in the ocean.',
        'image': image_url,
        'external_url': 'https://openseapirates.io/%s' % tid,
        'attributes': attributes
    })
@app.route('/api/box/pirate/<token_id>')
def pirate_box(token_id):
    """Metadata for a pirate booty box: a tradeable lootbox holding three pirates."""
    tid = int(token_id)
    image_url = _compose_image(['images/box/bootybox.png'], tid, 'box')

    attributes = []
    _add_attribute(attributes, 'number_inside', [3], tid)

    payload = {
        'name': 'Pirate Booty Box',
        'description': 'This bootybox contains some OpenSea Pirates! It can also be traded!',
        'image': image_url,
        'external_url': 'https://openseapirates.io/%s' % tid,
        'attributes': attributes
    }
    return jsonify(payload)
@app.route('/api/factory/pirate/<token_id>')
def pirate_factory(token_id):
    """Metadata for the three pirate factory sale options (token ids 0, 1, 2).

    Bug fix: any id outside 0-2 previously matched no branch, leaving `name`,
    `description`, `image_url` and `num_inside` unbound and crashing with
    UnboundLocalError (HTTP 500). Unknown ids now return a clean 404.
    """
    token_id = int(token_id)
    if token_id == 0:
        name = 'One OpenSea Pirate'
        description = 'When you purchase this option, you will receive a single OpenSea pirate of a random variety. ' \
                      'Enjoy and take good care of your aquatic being!'
        image_url = _compose_image(['images/factory/egg.png'], token_id, 'factory')
        num_inside = 1
    elif token_id == 1:
        name = 'Four OpenSea Pirates'
        description = 'When you purchase this option, you will receive four OpenSea pirates of random variety. ' \
                      'Enjoy and take good care of your aquatic beings!'
        image_url = _compose_image(['images/factory/four-eggs.png'], token_id, 'factory')
        num_inside = 4
    elif token_id == 2:
        name = 'One OpenSea Pirate bootybox'
        description = 'When you purchase this option, you will receive one bootybox, which can be opened to reveal three ' \
                      'OpenSea pirates of random variety. Enjoy and take good care of these cute aquatic beings!'
        image_url = _compose_image(['images/box/bootybox.png'], token_id, 'factory')
        num_inside = 3
    else:
        # Previously fell through with undefined locals; reject explicitly.
        abort(404, description='No such token')

    attributes = []
    _add_attribute(attributes, 'number_inside', [num_inside], token_id)

    return jsonify({
        'name': name,
        'description': description,
        'image': image_url,
        'external_url': 'https://openseapirates.io/%s' % token_id,
        'attributes': attributes
    })
# opensea-pirates-treasures
@app.route('/api/treasure/<token_id>')
def treasure(token_id):
    """Metadata for a single treasure token; ids past the catalog get a 404."""
    tid = int(token_id)
    if tid >= len(ACCESSORIES_NAMES):
        abort(404, description='No such token')

    # Treasure art is a static file; just copy it into the bucket.
    image_url = _bucket_image('images/treasure/%s' % ACCESSORIES_IMAGES[tid], tid, 'treasure')

    attributes = []
    _add_attribute(attributes, 'Aqua Boost', ACCESSORIES_ATTS_INT, tid, display_type='boost_number')
    _add_attribute(attributes, 'Stamina Increase', ACCESSORIES_ATTS_PERCENT, tid, display_type='boost_percentage')
    _add_attribute(attributes, 'Location', ACCESSORIES_ATTS_LOCATION, tid)
    _add_attribute(attributes, 'Depth', ACCESSORIES_ATTS_DEPTH, tid)
    _add_attribute(attributes, 'Rarity', ACCESSORIES_ATTS_RARITY, tid)
    _add_attribute(attributes, 'Generation', ACCESSORIES_ATTS_GENERATION, tid, display_type='number')

    return jsonify({
        'name': ACCESSORIES_NAMES[tid],
        'description': 'A fun and useful treasure for your friendly OpenSea pirates.',
        'image': image_url,
        'external_url': 'https://openseapirates.io/treasure/%s' % tid,
        'attributes': attributes
    })
@app.route('/api/box/treasure/<token_id>')
def treasure_box(token_id):
    """Metadata for a treasure booty box: a tradeable lootbox holding three treasures."""
    tid = int(token_id)
    image_url = _compose_image(['images/box/bootybox.png'], tid, 'box')

    attributes = []
    _add_attribute(attributes, 'number_inside', [3], tid)

    payload = {
        'name': 'Treasure Booty Box',
        'description': 'This bootybox contains some OpenSea Pirate treasures! It can also be traded!',
        'image': image_url,
        'external_url': 'https://openseapirates.io/box/treasure/%s' % tid,
        'attributes': attributes
    }
    return jsonify(payload)
@app.route('/api/factory/treasure/<token_id>')
def treasure_factory(token_id):
    """Metadata for the three treasure factory sale options (token ids 0, 1, 2).

    Bug fix: same defect as pirate_factory -- an id outside 0-2 matched no
    branch and crashed with UnboundLocalError; unknown ids now return 404.
    """
    token_id = int(token_id)
    if token_id == 0:
        name = 'One OpenSea Pirate Treasure'
        description = 'When you purchase this option, you will receive a single OpenSea pirate treasure of a random variety. ' \
                      'Enjoy and take good care of your aquatic being!'
        image_url = _compose_image(['images/factory/egg.png'], token_id, 'factory')
        num_inside = 1
    elif token_id == 1:
        name = 'Four OpenSea Pirate Treasures'
        description = 'When you purchase this option, you will receive four OpenSea pirate treasures of random variety. ' \
                      'Enjoy and take good care of your aquatic beings!'
        image_url = _compose_image(['images/factory/four-eggs.png'], token_id, 'factory')
        num_inside = 4
    elif token_id == 2:
        name = 'One OpenSea Pirate Treasure BootyBox'
        description = 'When you purchase this option, you will receive one bootybox, which can be opened to reveal three ' \
                      'OpenSea pirate treasures of random variety. Enjoy and take good care of these cute aquatic beings!'
        image_url = _compose_image(['images/box/bootybox.png'], token_id, 'factory')
        num_inside = 3
    else:
        # Previously fell through with undefined locals; reject explicitly.
        abort(404, description='No such token')

    attributes = []
    _add_attribute(attributes, 'number_inside', [num_inside], token_id)

    return jsonify({
        'name': name,
        'description': description,
        'image': image_url,
        'external_url': 'https://openseapirates.io/%s' % token_id,
        'attributes': attributes
    })
# contractURI()
@app.route('/contract/<contract_name>')
def contract_uri(contract_name):
    """Serve contractURI() metadata for a known contract name, else 404.

    Idiom fix: use `x not in` and test membership on the dict itself rather
    than the stale module-level keys() view. Behavior is unchanged.
    """
    if contract_name not in CONTRACT_URI_METADATA:
        abort(404, description='Resource not found')
    return jsonify(CONTRACT_URI_METADATA[contract_name])
# Error handling
@app.errorhandler(404)
def resource_not_found(e):
    """Render 404s as JSON (matching the API's content type) instead of Flask's HTML page."""
    return jsonify(error=str(e)), 404
########################################################################
# Utility code
########################################################################
def _add_attribute(existing, attribute_name, options, token_id, display_type=None):
trait = {
'trait_type': attribute_name,
'value': options[token_id % len(options)]
}
if display_type:
trait['display_type'] = display_type
existing.append(trait)
def _compose_image(image_files, token_id, path='pirate'):
    """Alpha-composite the given PNG layers (first file is the bottom layer),
    save the result locally, upload it to GCS, and return its public URL.
    """
    composite = None
    for image_file in image_files:
        foreground = Image.open(image_file).convert('RGBA')
        if composite:
            composite = Image.alpha_composite(composite, foreground)
        else:
            # First layer becomes the base image.
            composite = foreground
    # Written locally first; the bucket upload reads from this file.
    output_path = 'images/output/%s.png' % token_id
    composite.save(output_path)
    blob = _get_bucket().blob(f'{path}/{token_id}.png')
    blob.upload_from_filename(filename=output_path)
    return blob.public_url
def _bucket_image(image_path, token_id, path='treasure'):
    """Upload a static local image to GCS as '<path>/<token_id>.png' and return its public URL."""
    blob = _get_bucket().blob(f'{path}/{token_id}.png')
    blob.upload_from_filename(filename=image_path)
    return blob.public_url
def _get_bucket():
    """Return the configured GCS bucket, authenticating from a local service-account file."""
    credentials = service_account.Credentials.from_service_account_file('credentials/google-storage-credentials.json')
    if credentials.requires_scopes:
        # Scope down to Cloud Storage read/write only.
        credentials = credentials.with_scopes(['https://www.googleapis.com/auth/devstorage.read_write'])
    client = storage.Client(project=GOOGLE_STORAGE_PROJECT, credentials=credentials)
    return client.get_bucket(GOOGLE_STORAGE_BUCKET)
@app.route("/")
def home():
    """Serve the landing page.

    Bug fix: `render_template` was never imported, so this route raised
    NameError (HTTP 500) on every request; it is now imported from flask
    alongside the other helpers at the top of the file.
    """
    return render_template('index.html')
########################################################################
# Main flow of execution
########################################################################
if __name__ == '__main__':
    # Development server only; run behind a real WSGI server in production.
    app.run(debug=True, use_reloader=True)
| 36.035191
| 128
| 0.6368
|
from flask import Flask, abort
from flask import jsonify
from google.cloud import storage
from google.oauth2 import service_account
from PIL import Image
import os
import mimetypes
GOOGLE_STORAGE_PROJECT = os.environ['GOOGLE_STORAGE_PROJECT']
GOOGLE_STORAGE_BUCKET = os.environ['GOOGLE_STORAGE_BUCKET']
app = Flask(__name__)
| true
| true
|
1c481ebd72b3d31925f9e66052d9b12423ea6d1f
| 5,720
|
py
|
Python
|
src/putty/settings/ssh_host_keys/__init__.py
|
KalleDK/putty.settings
|
d8a7803bd6df343f53f6a8d404c8a1ceb7bc2436
|
[
"MIT"
] | 1
|
2021-05-25T10:42:15.000Z
|
2021-05-25T10:42:15.000Z
|
src/putty/settings/ssh_host_keys/__init__.py
|
KalleDK/putty.settings
|
d8a7803bd6df343f53f6a8d404c8a1ceb7bc2436
|
[
"MIT"
] | null | null | null |
src/putty/settings/ssh_host_keys/__init__.py
|
KalleDK/putty.settings
|
d8a7803bd6df343f53f6a8d404c8a1ceb7bc2436
|
[
"MIT"
] | null | null | null |
import re
import paramiko
import winreg
import pathlib
import typing
import logging
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
# Module logger. Bug fix: logging.getLevelName() returns a *string* (a level
# name), not a Logger, so the later logger.info(...) call in this module
# would raise AttributeError; getLogger() is the correct call.
logger = logging.getLogger(__name__)

# PuTTY keeps its per-user settings under HKCU\Software\SimonTatham\PuTTY.
STORE = winreg.HKEY_CURRENT_USER
PUTTY_PATH = pathlib.PureWindowsPath('Software', 'SimonTatham', 'PuTTY')

# Mapping between paramiko key-type names and PuTTY registry key-type names,
# plus its inverse for the reverse conversion.
paramiko_to_putty_key = {
    "ssh-rsa": "rsa2"
}
putty_to_paramiko_key = {val: key for key, val in paramiko_to_putty_key.items()}
class HostKeyEntry:
    """One known-host key, convertible between paramiko and PuTTY registry forms.

    PuTTY stores host keys as registry values named
    '<key_type>@<port>:<hostname>' whose data (for RSA) is 'hex(e),hex(n)'.
    paramiko identifies the same entry by a bare hostname (default port 22) or
    '[hostname]:port', plus a key-type string such as 'ssh-rsa'.
    """

    # Matches PuTTY registry value names, e.g. 'rsa2@22:example.com'.
    putty_host_entry_pattern = re.compile(r'(?P<key_type>.+)@(?P<port>.+):(?P<hostname>.+)')
    # Matches paramiko host entries with an explicit port, e.g. '[example.com]:2222'.
    paramiko_host_entry_pattern = re.compile(r'\[(?P<hostname>.+)\]:(?P<port>.+)')

    def __init__(self, hostname: str = None, port: str = None, key_type: str = None, key: paramiko.PKey = None):
        self.hostname = hostname
        # Port is kept as a string so it round-trips through both text formats.
        self.port = port
        # paramiko-style key-type name (e.g. 'ssh-rsa').
        self.key_type = key_type
        self.key = key

    @property
    def paramiko_host_entry(self):
        # paramiko omits the port for the SSH default (22).
        if self.port == '22':
            return self.hostname
        else:
            return "[{hostname}]:{port}".format(hostname=self.hostname, port=self.port)

    @paramiko_host_entry.setter
    def paramiko_host_entry(self, value):
        m = self.paramiko_host_entry_pattern.match(value)
        if m:
            self.hostname = m.group('hostname')
            self.port = m.group('port')
        else:
            # A bare hostname implies the default SSH port.
            self.hostname = value
            self.port = '22'

    @property
    def paramiko_key_type(self):
        return self.key_type

    @paramiko_key_type.setter
    def paramiko_key_type(self, value):
        self.key_type = value

    @property
    def paramiko_key(self):
        return self.key

    @paramiko_key.setter
    def paramiko_key(self, value):
        self.key = value

    @property
    def putty_key_type(self):
        # Raises KeyError for key types with no PuTTY mapping (only RSA is mapped).
        return paramiko_to_putty_key[self.key_type]

    @putty_key_type.setter
    def putty_key_type(self, value):
        self.key_type = putty_to_paramiko_key[value]

    @property
    def putty_host_entry(self):
        return "{key_type}@{port}:{hostname}".format(key_type=self.putty_key_type, port=self.port, hostname=self.hostname)

    @putty_host_entry.setter
    def putty_host_entry(self, value):
        m = self.putty_host_entry_pattern.match(value)
        if m:
            self.hostname = m.group('hostname')
            self.port = m.group('port')
            self.putty_key_type = m.group('key_type')
        else:
            raise Exception("Not valid host_entry")

    @property
    def putty_key(self):
        # NOTE(review): only RSA keys are serialized here; any other key type
        # falls through and returns None -- confirm that is intended.
        if self.key_type == 'ssh-rsa' and isinstance(self.key, paramiko.RSAKey):
            return '{e},{n}'.format(e=hex(self.key.public_numbers.e), n=hex(self.key.public_numbers.n))

    @putty_key.setter
    def putty_key(self, value):
        if self.key_type == 'ssh-rsa':
            # PuTTY stores RSA keys as 'hex(e),hex(n)'; int(x, 0) parses the 0x prefix.
            e, n = (int(x, 0) for x in value.split(','))
            self.key = paramiko.RSAKey(key=rsa.RSAPublicNumbers(e=e, n=n).public_key(default_backend()))

    @classmethod
    def from_registry_entry(cls, entry: typing.Tuple[str, str, int]):
        """Build an entry from a registry (value_name, data, type) tuple."""
        o = cls()
        o.putty_host_entry = entry[0]
        o.putty_key = entry[1]
        return o

    @classmethod
    def from_paramiko_entry(cls, host_entry, key_type, key):
        """Build an entry from a paramiko HostKeys (host, key_type, key) triple."""
        o = cls()
        o.paramiko_host_entry = host_entry
        o.paramiko_key_type = key_type
        o.paramiko_key = key
        return o
class SshHostKeys:
    """Collection of PuTTY known host keys backed by the Windows registry."""
    # Registry subkey (under HKCU) where PuTTY stores its host keys.
    path = str(PUTTY_PATH.joinpath('SshHostKeys'))
    def __init__(self):
        # Maps the PuTTY host-entry string ("rsa2@22:host") to its entry.
        self.host_keys: typing.Dict[str, HostKeyEntry] = {}
    def load(self):
        """Populate self.host_keys from the registry, skipping unparsable rows."""
        for registry_entry in self.get_from_registry():
            try:
                self.add(HostKeyEntry.from_registry_entry(registry_entry))
            except Exception:
                logger.info("Invalid keyformat {}".format(registry_entry))
    def save(self):
        """Write self.host_keys back, removing registry rows no longer present."""
        stale = [entry[0] for entry in self.get_from_registry()
                 if self.host_keys.get(entry[0]) is None]
        self.delete_from_registry(stale)
        self.set_registry_to(self.host_keys)
    def add(self, host_key_entry: HostKeyEntry):
        """Insert or replace an entry, keyed by its PuTTY host-entry string."""
        self.host_keys[host_key_entry.putty_host_entry] = host_key_entry
    def add_from_paramiko_host_keys(self, host_keys: paramiko.HostKeys):
        """Merge every key from a paramiko HostKeys collection into this one."""
        for host_entry in host_keys.keys():
            for key_type, key in host_keys.lookup(host_entry).items():
                self.add(HostKeyEntry.from_paramiko_entry(host_entry=host_entry, key_type=key_type, key=key))
    def add_to_paramiko_host_keys(self, host_keys: paramiko.HostKeys):
        """Copy every stored entry into a paramiko HostKeys collection."""
        # FIX: the dict keys are host-entry strings, not key types; iterate
        # values to avoid the misleading (and unused) unpacked name.
        for host_key in self.host_keys.values():
            host_keys.add(hostname=host_key.paramiko_host_entry, keytype=host_key.paramiko_key_type, key=host_key.paramiko_key)
    @classmethod
    def delete_from_registry(cls, entries):
        """Delete the named values from PuTTY's SshHostKeys registry key."""
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_ALL_ACCESS) as key:
            for entry in entries:
                winreg.DeleteValue(key, entry)
    @classmethod
    def get_from_registry(cls):
        """Return all (name, value, type) rows stored under the PuTTY key."""
        # FIX: read-only enumeration needs only KEY_READ (least privilege),
        # not KEY_ALL_ACCESS.
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_READ) as key:
            size = winreg.QueryInfoKey(key)[1]
            return [winreg.EnumValue(key, i) for i in range(size)]
    @classmethod
    def set_registry_to(cls, host_keys):
        """Write each entry as a REG_SZ value; log and skip unserializable ones."""
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_ALL_ACCESS) as key:
            for host_key in host_keys.values():
                try:
                    # FIX: use the named winreg.REG_SZ constant instead of the
                    # magic value 1 (same numeric value, explicit intent).
                    winreg.SetValueEx(key, host_key.putty_host_entry, 0, winreg.REG_SZ, host_key.putty_key)
                except Exception:
                    logger.info("Invalid keyformat {}".format(host_key))
| 33.450292
| 127
| 0.651224
|
import re
import paramiko
import winreg
import pathlib
import typing
import logging
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
# BUG FIX: logging.getLevelName() returns a level name/number string, not a
# logger object; the logger.info(...) calls below need a real Logger.
logger = logging.getLogger(__name__)
STORE = winreg.HKEY_CURRENT_USER
PUTTY_PATH = pathlib.PureWindowsPath('Software', 'SimonTatham', 'PuTTY')
# Translation tables between paramiko key-type names and PuTTY's.
paramiko_to_putty_key = {
    "ssh-rsa": "rsa2"
}
putty_to_paramiko_key = {val: key for key, val in paramiko_to_putty_key.items()}
class HostKeyEntry:
    """A single known-host key, convertible between PuTTY and paramiko forms.

    PuTTY's registry format names entries "<keytype>@<port>:<hostname>" with
    the key encoded as comma-separated hex integers; paramiko uses
    "hostname" / "[hostname]:port" entries with PKey objects.  The paired
    paramiko_* / putty_* properties translate between the two on the fly.
    """
    # Parses "rsa2@22:example.com" into key_type / port / hostname.
    putty_host_entry_pattern = re.compile(r'(?P<key_type>.+)@(?P<port>.+):(?P<hostname>.+)')
    # Parses "[example.com]:2222"; a bare hostname is handled in the setter.
    paramiko_host_entry_pattern = re.compile(r'\[(?P<hostname>.+)\]:(?P<port>.+)')
    def __init__(self, hostname: str = None, port: str = None, key_type: str = None, key: paramiko.PKey = None):
        # hostname/port are stored as strings; key_type is always kept in
        # paramiko form ("ssh-rsa") and translated by the putty_* properties.
        self.hostname = hostname
        self.port = port
        self.key_type = key_type
        self.key = key
    @property
    def paramiko_host_entry(self):
        """Paramiko-style entry: bare host for port 22, else "[host]:port"."""
        if self.port == '22':
            return self.hostname
        else:
            return "[{hostname}]:{port}".format(hostname=self.hostname, port=self.port)
    @paramiko_host_entry.setter
    def paramiko_host_entry(self, value):
        m = self.paramiko_host_entry_pattern.match(value)
        if m:
            self.hostname = m.group('hostname')
            self.port = m.group('port')
        else:
            # A bare hostname implies the default SSH port.
            self.hostname = value
            self.port = '22'
    @property
    def paramiko_key_type(self):
        """Key type in paramiko form (stored verbatim)."""
        return self.key_type
    @paramiko_key_type.setter
    def paramiko_key_type(self, value):
        self.key_type = value
    @property
    def paramiko_key(self):
        """The underlying paramiko.PKey object."""
        return self.key
    @paramiko_key.setter
    def paramiko_key(self, value):
        self.key = value
    @property
    def putty_key_type(self):
        """Key type in PuTTY form, e.g. "rsa2"; raises KeyError for unknown types."""
        return paramiko_to_putty_key[self.key_type]
    @putty_key_type.setter
    def putty_key_type(self, value):
        self.key_type = putty_to_paramiko_key[value]
    @property
    def putty_host_entry(self):
        """PuTTY registry value name: "<keytype>@<port>:<hostname>"."""
        return "{key_type}@{port}:{hostname}".format(key_type=self.putty_key_type, port=self.port, hostname=self.hostname)
    @putty_host_entry.setter
    def putty_host_entry(self, value):
        m = self.putty_host_entry_pattern.match(value)
        if m:
            self.hostname = m.group('hostname')
            self.port = m.group('port')
            self.putty_key_type = m.group('key_type')
        else:
            raise Exception("Not valid host_entry")
    @property
    def putty_key(self):
        """PuTTY registry encoding "0x<e>,0x<n>" for RSA keys; None otherwise.

        NOTE(review): relies on paramiko.RSAKey exposing a ``public_numbers``
        property -- confirm against the pinned paramiko version.
        """
        if self.key_type == 'ssh-rsa' and isinstance(self.key, paramiko.RSAKey):
            return '{e},{n}'.format(e=hex(self.key.public_numbers.e), n=hex(self.key.public_numbers.n))
    @putty_key.setter
    def putty_key(self, value):
        # Key types other than ssh-rsa are silently ignored.
        if self.key_type == 'ssh-rsa':
            e, n = (int(x, 0) for x in value.split(','))
            self.key = paramiko.RSAKey(key=rsa.RSAPublicNumbers(e=e, n=n).public_key(default_backend()))
    @classmethod
    def from_registry_entry(cls, entry: typing.Tuple[str, str, int]):
        """Build an entry from a winreg (name, value, type) tuple."""
        o = cls()
        o.putty_host_entry = entry[0]
        o.putty_key = entry[1]
        return o
    @classmethod
    def from_paramiko_entry(cls, host_entry, key_type, key):
        """Build an entry from a paramiko HostKeys (entry, key_type, key) triple."""
        o = cls()
        o.paramiko_host_entry = host_entry
        o.paramiko_key_type = key_type
        o.paramiko_key = key
        return o
class SshHostKeys:
    """Collection of PuTTY known host keys backed by the Windows registry."""
    # Registry subkey (under HKCU) where PuTTY stores its host keys.
    path = str(PUTTY_PATH.joinpath('SshHostKeys'))
    def __init__(self):
        # Maps the PuTTY host-entry string ("rsa2@22:host") to its entry.
        self.host_keys: typing.Dict[str, HostKeyEntry] = {}
    def load(self):
        """Populate self.host_keys from the registry, skipping unparsable rows."""
        for registry_entry in self.get_from_registry():
            try:
                self.add(HostKeyEntry.from_registry_entry(registry_entry))
            except Exception:
                logger.info("Invalid keyformat {}".format(registry_entry))
    def save(self):
        """Write self.host_keys back, removing registry rows no longer present."""
        stale = [entry[0] for entry in self.get_from_registry()
                 if self.host_keys.get(entry[0]) is None]
        self.delete_from_registry(stale)
        self.set_registry_to(self.host_keys)
    def add(self, host_key_entry: HostKeyEntry):
        """Insert or replace an entry, keyed by its PuTTY host-entry string."""
        self.host_keys[host_key_entry.putty_host_entry] = host_key_entry
    def add_from_paramiko_host_keys(self, host_keys: paramiko.HostKeys):
        """Merge every key from a paramiko HostKeys collection into this one."""
        for host_entry in host_keys.keys():
            for key_type, key in host_keys.lookup(host_entry).items():
                self.add(HostKeyEntry.from_paramiko_entry(host_entry=host_entry, key_type=key_type, key=key))
    def add_to_paramiko_host_keys(self, host_keys: paramiko.HostKeys):
        """Copy every stored entry into a paramiko HostKeys collection."""
        # FIX: the dict keys are host-entry strings, not key types; iterate
        # values to avoid the misleading (and unused) unpacked name.
        for host_key in self.host_keys.values():
            host_keys.add(hostname=host_key.paramiko_host_entry, keytype=host_key.paramiko_key_type, key=host_key.paramiko_key)
    @classmethod
    def delete_from_registry(cls, entries):
        """Delete the named values from PuTTY's SshHostKeys registry key."""
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_ALL_ACCESS) as key:
            for entry in entries:
                winreg.DeleteValue(key, entry)
    @classmethod
    def get_from_registry(cls):
        """Return all (name, value, type) rows stored under the PuTTY key."""
        # FIX: read-only enumeration needs only KEY_READ (least privilege),
        # not KEY_ALL_ACCESS.
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_READ) as key:
            size = winreg.QueryInfoKey(key)[1]
            return [winreg.EnumValue(key, i) for i in range(size)]
    @classmethod
    def set_registry_to(cls, host_keys):
        """Write each entry as a REG_SZ value; log and skip unserializable ones."""
        with winreg.OpenKey(STORE, cls.path, 0, winreg.KEY_ALL_ACCESS) as key:
            for host_key in host_keys.values():
                try:
                    # FIX: use the named winreg.REG_SZ constant instead of the
                    # magic value 1 (same numeric value, explicit intent).
                    winreg.SetValueEx(key, host_key.putty_host_entry, 0, winreg.REG_SZ, host_key.putty_key)
                except Exception:
                    logger.info("Invalid keyformat {}".format(host_key))
| true
| true
|
1c481f629295c2319555aad0fd9a68a7fdad9f62
| 1,209
|
py
|
Python
|
torch_collections/models/_siamese_configs.py
|
mingruimingrui/torch-collections
|
f7c20b28b63de76c763983338aa4c825904ef4cd
|
[
"MIT"
] | 3
|
2018-08-14T19:40:58.000Z
|
2018-10-22T15:41:39.000Z
|
torch_collections/models/_siamese_configs.py
|
mingruimingrui/torch-collections
|
f7c20b28b63de76c763983338aa4c825904ef4cd
|
[
"MIT"
] | 2
|
2018-08-14T19:40:41.000Z
|
2018-10-29T14:46:40.000Z
|
torch_collections/models/_siamese_configs.py
|
mingruimingrui/torch-collections
|
f7c20b28b63de76c763983338aa4c825904ef4cd
|
[
"MIT"
] | null | null | null |
from __future__ import division
from copy import deepcopy
from ..utils.collections import AttrDict
# Default (immutable) configuration parameters for the siamese encoder.
_c = AttrDict()
################################################################################
#### Start of configurable parameters
#### Model configs
_c.name = 'encoder'
_c.input_size = [160, 160]
_c.embedding_size = 128
_c.backbone = 'resnet18'
_c.freeze_backbone = False
_c.l2_norm_alpha = 10  # L2-norm scaling; based off https://arxiv.org/pdf/1703.09507.pdf
#### Loss configs
_c.margin = 0.5
_c.dist_type = 'euclidean'  # option of ['euclidean', 'cosine']
_c.p_norm = 2.0  # normalization degree in euclidean distance
### Sample selection strategy
_c.negative_mining_type = 'hard'  # option of ['random', 'semihard', 'hard']
################################################################################
#### End of configurable parameters
# Freeze the defaults; make_configs() below works on an unfrozen deep copy.
_c.immutable(True)
def make_configs(**overrides):
    """Return a frozen copy of the default configs with *overrides* applied."""
    configs = deepcopy(_c)
    # Unfreeze the copy so the user-supplied values can be written.
    configs.immutable(False)
    for name in overrides:
        configs[name] = overrides[name]
    # Freeze again so downstream code cannot mutate the configuration.
    configs.immutable(True)
    return configs
| 25.723404
| 80
| 0.604632
|
from __future__ import division
from copy import deepcopy
from ..utils.collections import AttrDict
_c = AttrDict()
| true
| true
|
1c4821226a68cf04839da2954c8a4716bab862ee
| 1,285
|
py
|
Python
|
atividade4.py
|
cauanicastro/Prog1Ifes
|
dc0ad7d42d45b837c76e178d43bf608afaab3f02
|
[
"Apache-2.0"
] | null | null | null |
atividade4.py
|
cauanicastro/Prog1Ifes
|
dc0ad7d42d45b837c76e178d43bf608afaab3f02
|
[
"Apache-2.0"
] | null | null | null |
atividade4.py
|
cauanicastro/Prog1Ifes
|
dc0ad7d42d45b837c76e178d43bf608afaab3f02
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'cauanicastro'
__copyright__ = "Copyright 2015, Cauani Castro"
__credits__ = ["Cauani Castro"]
__license__ = "Apache License 2.0"
__version__ = "1.0"
__maintainer__ = "Cauani Castro"
__email__ = "cauani.castro@hotmail.com"
__status__ = "Examination program"
def calculaRaiz(numero, aproximacoes):
    """Approximate sqrt(numero) with *aproximacoes* Newton iterations.

    The first iteration seeds the estimate with numero / 2; with zero
    iterations the reported root is 0, matching the original behaviour.
    Returns a formatted report line ending in a newline.
    """
    raiz = 0
    if aproximacoes > 0:
        # Iteration 0 only sets the initial guess.
        raiz = numero / 2
        for _ in range(aproximacoes - 1):
            # Newton step for sqrt: x' = (x^2 + N) / (2x).
            raiz = (raiz ** 2 + numero) / (2 * raiz)
    return "Num = %.5f Aprox = %d Raiz Quadrada = %.10f\n" % (numero, aproximacoes, raiz)
def main():
    """Interactive loop: read numbers and print their Newton-method square roots."""
    print("Este programa ira calcular a raiz quadrada de uma sequencia de numeros positivos, baseado no metodo de aproximacoes sucessivas de newton.")
    print("Para sair do programa digite um numero menor ou igual a zero.")
    while True:
        numero = float(input("Digite um numero (real, positivo) para calcular a sua raiz quadrada:\n"))
        # Zero or a negative number terminates the loop.
        if numero <= 0:
            break
        aproximacoes = int(input("Digite o numero (inteiro) de aproximacoes desejada:\n"))
        print(calculaRaiz(numero, aproximacoes))
    print("\n#####################################")
    print("           FIM DO PROGRAMA")
    print("#####################################")
# Run the interactive program only when executed as a script.
if __name__ == '__main__':
    main()
| 38.939394
| 150
| 0.610117
|
__author__ = 'cauanicastro'
__copyright__ = "Copyright 2015, Cauani Castro"
__credits__ = ["Cauani Castro"]
__license__ = "Apache License 2.0"
__version__ = "1.0"
__maintainer__ = "Cauani Castro"
__email__ = "cauani.castro@hotmail.com"
__status__ = "Examination program"
def calculaRaiz(numero, aproximacoes):
    """Approximate sqrt(numero) with *aproximacoes* Newton iterations.

    The first iteration seeds the estimate with numero / 2; with zero
    iterations the reported root is 0, matching the original behaviour.
    Returns a formatted report line ending in a newline.
    """
    raiz = 0
    if aproximacoes > 0:
        # Iteration 0 only sets the initial guess.
        raiz = numero / 2
        for _ in range(aproximacoes - 1):
            # Newton step for sqrt: x' = (x^2 + N) / (2x).
            raiz = (raiz ** 2 + numero) / (2 * raiz)
    return "Num = %.5f Aprox = %d Raiz Quadrada = %.10f\n" % (numero, aproximacoes, raiz)
def main():
    """Interactive loop: read numbers and print their Newton-method square roots."""
    print("Este programa ira calcular a raiz quadrada de uma sequencia de numeros positivos, baseado no metodo de aproximacoes sucessivas de newton.")
    print("Para sair do programa digite um numero menor ou igual a zero.")
    while True:
        numero = float(input("Digite um numero (real, positivo) para calcular a sua raiz quadrada:\n"))
        # Zero or a negative number terminates the loop.
        if numero <= 0:
            break
        aproximacoes = int(input("Digite o numero (inteiro) de aproximacoes desejada:\n"))
        print(calculaRaiz(numero, aproximacoes))
    print("\n#####################################")
    print("           FIM DO PROGRAMA")
    print("#####################################")
# Run the interactive program only when executed as a script.
if __name__ == '__main__':
    main()
| true
| true
|
1c4821aedc79f26cc40a3c7988e922eba605d9a0
| 926
|
py
|
Python
|
bomber_monkey/features/bomb/explosion.py
|
MonkeyPatchIo/bomber-monkey
|
8a351ef1a0ef18e9d98ad72d7274c41f02c0ed1b
|
[
"MIT"
] | null | null | null |
bomber_monkey/features/bomb/explosion.py
|
MonkeyPatchIo/bomber-monkey
|
8a351ef1a0ef18e9d98ad72d7274c41f02c0ed1b
|
[
"MIT"
] | null | null | null |
bomber_monkey/features/bomb/explosion.py
|
MonkeyPatchIo/bomber-monkey
|
8a351ef1a0ef18e9d98ad72d7274c41f02c0ed1b
|
[
"MIT"
] | null | null | null |
from enum import IntEnum
from python_ecs.ecs import Component
class ExplosionDirection(IntEnum):
    """Bitmask of directions an explosion can spread in."""
    UP = 1
    LEFT = 2
    RIGHT = 4
    DOWN = 8
    ALL = UP | LEFT | RIGHT | DOWN
    @staticmethod
    def opposed(direction: 'ExplosionDirection'):
        """Return the opposite of a single cardinal direction.

        Composite values such as ALL have no opposite and yield None,
        mirroring the original if/elif fall-through.
        """
        opposites = {
            ExplosionDirection.UP: ExplosionDirection.DOWN,
            ExplosionDirection.DOWN: ExplosionDirection.UP,
            ExplosionDirection.LEFT: ExplosionDirection.RIGHT,
            ExplosionDirection.RIGHT: ExplosionDirection.LEFT,
        }
        return opposites.get(direction)
class Explosion(Component):
    """ECS component describing an active explosion cell."""
    def __init__(self, direction: ExplosionDirection, power: int) -> None:
        super().__init__()
        # Flag first: no neighbours have been spawned yet for this explosion.
        self.propagated = False
        self.power = power
        self.direction = direction
    def __repr__(self):
        return 'Explosion({})'.format(self.direction)
| 27.235294
| 74
| 0.653348
|
from enum import IntEnum
from python_ecs.ecs import Component
class ExplosionDirection(IntEnum):
    """Bitmask of directions an explosion can spread in."""
    UP = 1
    LEFT = 2
    RIGHT = 4
    DOWN = 8
    ALL = UP | LEFT | RIGHT | DOWN
    @staticmethod
    def opposed(direction: 'ExplosionDirection'):
        """Return the opposite of a single cardinal direction.

        Composite values such as ALL have no opposite and yield None,
        mirroring the original if/elif fall-through.
        """
        opposites = {
            ExplosionDirection.UP: ExplosionDirection.DOWN,
            ExplosionDirection.DOWN: ExplosionDirection.UP,
            ExplosionDirection.LEFT: ExplosionDirection.RIGHT,
            ExplosionDirection.RIGHT: ExplosionDirection.LEFT,
        }
        return opposites.get(direction)
class Explosion(Component):
    """ECS component describing an active explosion cell."""
    def __init__(self, direction: ExplosionDirection, power: int) -> None:
        super().__init__()
        # Flag first: no neighbours have been spawned yet for this explosion.
        self.propagated = False
        self.power = power
        self.direction = direction
    def __repr__(self):
        return 'Explosion({})'.format(self.direction)
| true
| true
|
1c48221e622c65c1bec23b30d4231c02a9a6600d
| 751
|
py
|
Python
|
setup.py
|
Tynukua/getManga
|
8cc5b090ec3dfcc6cfa7db3ce9e5220e7ef54d2b
|
[
"MIT"
] | 3
|
2021-05-24T07:38:20.000Z
|
2022-03-30T14:47:23.000Z
|
setup.py
|
Tynukua/getManga
|
8cc5b090ec3dfcc6cfa7db3ce9e5220e7ef54d2b
|
[
"MIT"
] | 1
|
2021-03-17T08:59:44.000Z
|
2021-03-17T08:59:44.000Z
|
setup.py
|
Tynukua/getManga
|
8cc5b090ec3dfcc6cfa7db3ce9e5220e7ef54d2b
|
[
"MIT"
] | 1
|
2019-07-02T20:00:54.000Z
|
2019-07-02T20:00:54.000Z
|
import setuptools

# Long description is taken from the README shipped next to this script.
with open("README.md", "r", encoding="utf-8") as f:
    long_description = f.read()
setuptools.setup(
    name = 'getmanga',
    version="0.1.6",
    author="Tynukua",
    author_email = 'tynuk.ua@gmail.com',
    description = 'package for load manga! :)',
    long_description=long_description,
    long_description_content_type="text/markdown",
    url='https://github.com/Tynukua/getManga',
    packages=setuptools.find_packages(),
    # BUG FIX: the setuptools keyword is `python_requires`; the previous
    # `requires_python` was an unknown option and silently ignored, so the
    # version constraint never reached the package metadata.
    python_requires='>=3.7',
    classifiers=[
        'Programming Language :: Python :: 3.7',
        'License :: OSI Approved :: MIT License',
        "Operating System :: OS Independent",] ,
    install_requires=[
        'aiohttp',
        'requests',
        'beautifulsoup4',
        'aiofiles']
)
| 27.814815
| 50
| 0.617843
|
import setuptools

# Long description is taken from the README shipped next to this script.
with open("README.md", "r", encoding="utf-8") as f:
    long_description = f.read()
setuptools.setup(
    name = 'getmanga',
    version="0.1.6",
    author="Tynukua",
    author_email = 'tynuk.ua@gmail.com',
    description = 'package for load manga! :)',
    long_description=long_description,
    long_description_content_type="text/markdown",
    url='https://github.com/Tynukua/getManga',
    packages=setuptools.find_packages(),
    # BUG FIX: the setuptools keyword is `python_requires`; the previous
    # `requires_python` was an unknown option and silently ignored, so the
    # version constraint never reached the package metadata.
    python_requires='>=3.7',
    classifiers=[
        'Programming Language :: Python :: 3.7',
        'License :: OSI Approved :: MIT License',
        "Operating System :: OS Independent",] ,
    install_requires=[
        'aiohttp',
        'requests',
        'beautifulsoup4',
        'aiofiles']
)
| true
| true
|
1c4822a66e7bd46682a2b852d9f477c93c5099e2
| 736
|
py
|
Python
|
config/goalpost_default.py
|
ieu-acm/varipy
|
55e12c9f854ab0b568495d9bf682476ad182a88b
|
[
"MIT"
] | null | null | null |
config/goalpost_default.py
|
ieu-acm/varipy
|
55e12c9f854ab0b568495d9bf682476ad182a88b
|
[
"MIT"
] | null | null | null |
config/goalpost_default.py
|
ieu-acm/varipy
|
55e12c9f854ab0b568495d9bf682476ad182a88b
|
[
"MIT"
] | null | null | null |
""" Project configuration parameters """
from imgaug import augmenters as iaa
class config:
    """Training configuration for the goalpost model."""
    # Relative to home directory of repository;
    # expects "masked" and "original" sub-directories.
    path = "data"
    input_shape = (256,256,3)
    num_workers = 2
    val_ratio = 0.2  # fraction of the data held out for validation
    weights_path = "weights"
    epochs = 50
    batch_size = 16
    # imgaug augmentation pipeline applied to training samples only.
    train_transforms = iaa.Sequential([
        iaa.Crop(px=(1,16),keep_size=False),
        iaa.Fliplr(0.5),
        iaa.MotionBlur(),
        iaa.FastSnowyLandscape(
            lightness_threshold=[128, 200],
            lightness_multiplier=(1.5, 3.5)),
        iaa.Snowflakes(flake_size=(0.1, 0.4), speed=(0.01, 0.05)),
        iaa.Fog(),
    ])
    # No augmentation at validation time.
    valid_transforms = None
| 26.285714
| 68
| 0.607337
|
from imgaug import augmenters as iaa
class config:
    """Training configuration for the goalpost model."""
    # Relative to home directory of repository;
    # expects "masked" and "original" sub-directories.
    path = "data"
    input_shape = (256,256,3)
    num_workers = 2
    val_ratio = 0.2  # fraction of the data held out for validation
    weights_path = "weights"
    epochs = 50
    batch_size = 16
    # imgaug augmentation pipeline applied to training samples only.
    train_transforms = iaa.Sequential([
        iaa.Crop(px=(1,16),keep_size=False),
        iaa.Fliplr(0.5),
        iaa.MotionBlur(),
        iaa.FastSnowyLandscape(
            lightness_threshold=[128, 200],
            lightness_multiplier=(1.5, 3.5)),
        iaa.Snowflakes(flake_size=(0.1, 0.4), speed=(0.01, 0.05)),
        iaa.Fog(),
    ])
    # No augmentation at validation time.
    valid_transforms = None
| true
| true
|
1c482301e0d3e00345447ebf90bd35859d42c3d2
| 2,271
|
py
|
Python
|
canella/config.py
|
mush42/Canella-CMS
|
b5132c271a3b8840f0b165c62d14de6853a3e5ac
|
[
"MIT"
] | 8
|
2017-01-30T22:46:40.000Z
|
2018-03-30T21:35:28.000Z
|
canella/config.py
|
mush42/Canella-CMS
|
b5132c271a3b8840f0b165c62d14de6853a3e5ac
|
[
"MIT"
] | null | null | null |
canella/config.py
|
mush42/Canella-CMS
|
b5132c271a3b8840f0b165c62d14de6853a3e5ac
|
[
"MIT"
] | 2
|
2018-01-16T10:31:27.000Z
|
2020-10-01T19:49:10.000Z
|
import os
from collections import OrderedDict
from canella import app
from canella.babel import lazy_gettext
# Application base directory; all relative paths below hang off this.
_BASEDIR = app.root_path
HOME_SLUG = 'index'
DB_DIR = os.path.join(_BASEDIR, '..', '.ignore.local', 'data.db')
# NOTE(review): DEBUG=True plus the hard-coded SECRET_KEY and
# SECURITY_PASSWORD_SALT below should not ship in a production config --
# consider loading them from the environment / an instance config.
DEBUG = True
SECRET_KEY = '9bW7b2046be56b4c00b6f10dc2f3c4Ae56SL5PC9'
SQLALCHEMY_DATABASE_URI = "sqlite:///{}".format(DB_DIR)
SQLALCHEMY_TRACK_MODIFICATIONS = False
ERROR_404_HELP = False
# Upload destinations for content, media files and form attachments.
CONTENT_PATH = os.path.join(_BASEDIR, 'uploads', 'content')
MEDIA_PATH = os.path.join(_BASEDIR, 'uploads', 'media')
FORM_UPLOADS_PATH = os.path.join(_BASEDIR, 'uploads', 'forms')
# SECURITY_* settings -- presumably consumed by Flask-Security; confirm.
SECURITY_PASSWORD_HASH = 'pbkdf2_sha512'
SECURITY_PASSWORD_SALT = '540SDW4426HCAER56546aDrw213d2a6b9a94e15b5d'
SECURITY_USER_IDENTITY_ATTRIBUTES = ['email', 'user_name']
SECURITY_POST_LOGIN_VIEW = '/admin'
SECURITY_CONFIRMABLE = False
SECURITY_RECOVERABLE = True
SECURITY_RESET_URL = '/reset-password/'
ALLOWED_EXTENSIONS = ['doc', 'docx', 'ppt', 'pptx', 'pdf', 'zip']
ENABLE_INLINE_EDITING = True
# Ordered so the first entry is the presentation default in locale pickers.
SUPPORTED_LOCALES = OrderedDict((
    ('en', 'English'),
    ('ar', 'Arabic')
))
DEFAULT_LOCALE = 'en'
# Babel message-catalog configuration (extraction and compilation paths).
BABEL = dict(
    domain='canella',
    translations_directory=os.path.join(_BASEDIR, 'translations'),
    babel_config=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel', 'babel.cfg')),
    catalog_output_path=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel')),
    catalog_filename=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel', 'canella.pot')),
    project_name='Canella-CMS',
)
# Add extra fields you want to add to the profile here; lambdas defer
# lookups until app.config is fully populated.
PROFILE_EXTRA_FIELDS = (
    dict(name='language',
        label=lazy_gettext('Default Site Language'),
        description=lazy_gettext('All the site elements will be displayed in this language'),
        type='select',
        choices=lambda: app.config['SUPPORTED_LOCALES'].items(),
        default=lambda: app.config['DEFAULT_LOCALE']),
    dict(name='facebook_profile',
        label=lazy_gettext('Facebook Profile'),
        description=lazy_gettext('Will be displayed beneeth your bio in some places'), type='url', default=''),
    dict(name='twitter_account',
        label=lazy_gettext('Twitter Page URL'),
        description=lazy_gettext('Will be displayed beneeth your bio in some places'), type='url', default=''),
)
| 39.842105
| 111
| 0.72303
|
import os
from collections import OrderedDict
from canella import app
from canella.babel import lazy_gettext
# Application base directory; all relative paths below hang off this.
_BASEDIR = app.root_path
HOME_SLUG = 'index'
DB_DIR = os.path.join(_BASEDIR, '..', '.ignore.local', 'data.db')
# NOTE(review): DEBUG=True plus the hard-coded SECRET_KEY and
# SECURITY_PASSWORD_SALT below should not ship in a production config --
# consider loading them from the environment / an instance config.
DEBUG = True
SECRET_KEY = '9bW7b2046be56b4c00b6f10dc2f3c4Ae56SL5PC9'
SQLALCHEMY_DATABASE_URI = "sqlite:///{}".format(DB_DIR)
SQLALCHEMY_TRACK_MODIFICATIONS = False
ERROR_404_HELP = False
# Upload destinations for content, media files and form attachments.
CONTENT_PATH = os.path.join(_BASEDIR, 'uploads', 'content')
MEDIA_PATH = os.path.join(_BASEDIR, 'uploads', 'media')
FORM_UPLOADS_PATH = os.path.join(_BASEDIR, 'uploads', 'forms')
# SECURITY_* settings -- presumably consumed by Flask-Security; confirm.
SECURITY_PASSWORD_HASH = 'pbkdf2_sha512'
SECURITY_PASSWORD_SALT = '540SDW4426HCAER56546aDrw213d2a6b9a94e15b5d'
SECURITY_USER_IDENTITY_ATTRIBUTES = ['email', 'user_name']
SECURITY_POST_LOGIN_VIEW = '/admin'
SECURITY_CONFIRMABLE = False
SECURITY_RECOVERABLE = True
SECURITY_RESET_URL = '/reset-password/'
ALLOWED_EXTENSIONS = ['doc', 'docx', 'ppt', 'pptx', 'pdf', 'zip']
ENABLE_INLINE_EDITING = True
# Ordered so the first entry is the presentation default in locale pickers.
SUPPORTED_LOCALES = OrderedDict((
    ('en', 'English'),
    ('ar', 'Arabic')
))
DEFAULT_LOCALE = 'en'
# Babel message-catalog configuration (extraction and compilation paths).
BABEL = dict(
    domain='canella',
    translations_directory=os.path.join(_BASEDIR, 'translations'),
    babel_config=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel', 'babel.cfg')),
    catalog_output_path=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel')),
    catalog_filename=os.path.abspath(os.path.join(_BASEDIR, '..', 'babel', 'canella.pot')),
    project_name='Canella-CMS',
)
# Extra profile fields; lambdas defer lookups until app.config is populated.
PROFILE_EXTRA_FIELDS = (
    dict(name='language',
        label=lazy_gettext('Default Site Language'),
        description=lazy_gettext('All the site elements will be displayed in this language'),
        type='select',
        choices=lambda: app.config['SUPPORTED_LOCALES'].items(),
        default=lambda: app.config['DEFAULT_LOCALE']),
    dict(name='facebook_profile',
        label=lazy_gettext('Facebook Profile'),
        description=lazy_gettext('Will be displayed beneeth your bio in some places'), type='url', default=''),
    dict(name='twitter_account',
        label=lazy_gettext('Twitter Page URL'),
        description=lazy_gettext('Will be displayed beneeth your bio in some places'), type='url', default=''),
)
| true
| true
|
1c4823a2bf4002b2e4471898f081ed69d54fd15f
| 29,184
|
py
|
Python
|
magnum/db/sqlalchemy/api.py
|
jflower154/magnum
|
2b0b3f3e4c9888ff323d4be5cb9b3e97fde4a67a
|
[
"Apache-2.0"
] | null | null | null |
magnum/db/sqlalchemy/api.py
|
jflower154/magnum
|
2b0b3f3e4c9888ff323d4be5cb9b3e97fde4a67a
|
[
"Apache-2.0"
] | null | null | null |
magnum/db/sqlalchemy/api.py
|
jflower154/magnum
|
2b0b3f3e4c9888ff323d4be5cb9b3e97fde4a67a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy storage backend."""
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import sqlalchemy as sa
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import func
from magnum.common import clients
from magnum.common import context as request_context
from magnum.common import exception
import magnum.conf
from magnum.db import api
from magnum.db.sqlalchemy import models
from magnum.i18n import _
profiler_sqlalchemy = importutils.try_import('osprofiler.sqlalchemy')
CONF = magnum.conf.CONF
LOG = log.getLogger(__name__)
_FACADE = None
def _create_facade_lazily():
    """Create the module-global EngineFacade on first use and return it."""
    global _FACADE
    if _FACADE is None:
        _FACADE = db_session.EngineFacade.from_config(CONF)
        # Hook osprofiler into SQLAlchemy only when the library is installed
        # and tracing is enabled in the configuration.
        if (profiler_sqlalchemy
                and CONF.profiler.enabled
                and CONF.profiler.trace_sqlalchemy):
            profiler_sqlalchemy.add_tracing(sa, _FACADE.get_engine(), "db")
    return _FACADE
def get_engine():
    """Return the SQLAlchemy engine of the lazily-created engine facade."""
    facade = _create_facade_lazily()
    return facade.get_engine()
def get_session(**kwargs):
    """Return a new session from the engine facade.

    :param kwargs: passed through to EngineFacade.get_session()
    """
    facade = _create_facade_lazily()
    return facade.get_session(**kwargs)
def get_backend():
    """The backend is this module itself."""
    # Connection (defined below) carries no state of its own, so a fresh
    # instance per call is cheap.
    return Connection()
def model_query(model, *args, **kwargs):
    """Build a query for *model*, reusing a caller-supplied session if any.

    :param session: if present (truthy), the session to use; otherwise a
        new session is obtained via get_session()
    """
    session = kwargs.get('session') or get_session()
    return session.query(model, *args)
def add_identity_filter(query, value):
    """Add an identity filter to a query.

    Filters by integer ID when *value* is int-like, otherwise by UUID when
    it is uuid-like.

    :param query: initial query to add the filter to
    :param value: value to filter results by
    :return: the modified query
    :raises exception.InvalidIdentity: if value is neither int- nor uuid-like
    """
    if strutils.is_int_like(value):
        return query.filter_by(id=value)
    if uuidutils.is_uuid_like(value):
        return query.filter_by(uuid=value)
    raise exception.InvalidIdentity(identity=value)
def _paginate_query(model, limit=None, marker=None, sort_key=None,
                    sort_dir=None, query=None):
    """Apply sorting and keyset pagination to *query* and return the rows.

    :param model: SQLAlchemy model class being queried
    :param limit: maximum number of rows to return (None for unlimited)
    :param marker: last row of the previous page, for keyset pagination
    :param sort_key: optional column name to sort by before the 'id' tiebreak
    :param sort_dir: sort direction passed through to paginate_query
    :param query: existing query to paginate; a fresh one is built if omitted
    :raises exception.InvalidParameterValue: if sort_key cannot be sorted on
    """
    if not query:
        query = model_query(model)
    # 'id' is always included as the final sort key so the ordering has a
    # unique tiebreaker, which keyset pagination requires.
    sort_keys = ['id']
    if sort_key and sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    try:
        query = db_utils.paginate_query(query, model, limit, sort_keys,
                                        marker=marker, sort_dir=sort_dir)
    except db_exc.InvalidSortKey:
        raise exception.InvalidParameterValue(
            _('The sort_key value "%(key)s" is an invalid field for sorting')
            % {'key': sort_key})
    return query.all()
class Connection(api.Connection):
"""SqlAlchemy connection."""
def __init__(self):
pass
def _add_tenant_filters(self, context, query):
if context.is_admin and context.all_tenants:
return query
admin_context = request_context.make_admin_context(all_tenants=True)
osc = clients.OpenStackClients(admin_context)
kst = osc.keystone()
# User in a regular project (not in the trustee domain)
if context.project_id and context.domain_id != kst.trustee_domain_id:
query = query.filter_by(project_id=context.project_id)
# Match project ID component in trustee user's user name against
# cluster's project_id to associate per-cluster trustee users who have
# no project information with the project their clusters/cluster models
# reside in. This is equivalent to the project filtering above.
elif context.domain_id == kst.trustee_domain_id:
user_name = kst.client.users.get(context.user_id).name
user_project = user_name.split('_', 2)[1]
query = query.filter_by(project_id=user_project)
else:
query = query.filter_by(user_id=context.user_id)
return query
def _add_clusters_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["cluster_template_id", "name", "node_count",
"master_count", "stack_id", "api_address",
"node_addresses", "project_id", "user_id"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
query = query.filter_by(**filter_dict)
if 'status' in filters:
query = query.filter(models.Cluster.status.in_(filters['status']))
return query
def get_cluster_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = self._add_clusters_filters(query, filters)
return _paginate_query(models.Cluster, limit, marker,
sort_key, sort_dir, query)
def create_cluster(self, values):
# ensure defaults are present for new clusters
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
cluster = models.Cluster()
cluster.update(values)
try:
cluster.save()
except db_exc.DBDuplicateEntry:
raise exception.ClusterAlreadyExists(uuid=values['uuid'])
return cluster
def get_cluster_by_id(self, context, cluster_id):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=cluster_id)
try:
return query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
def get_cluster_by_name(self, context, cluster_name):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=cluster_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple clusters exist with same name.'
' Please use the cluster uuid instead.')
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_name)
def get_cluster_by_uuid(self, context, cluster_uuid):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=cluster_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_uuid)
def get_cluster_stats(self, context, project_id=None):
query = model_query(models.Cluster)
node_count_col = models.Cluster.node_count
master_count_col = models.Cluster.master_count
ncfunc = func.sum(node_count_col + master_count_col)
if project_id:
query = query.filter_by(project_id=project_id)
nquery = query.session.query(ncfunc.label("nodes")).filter_by(
project_id=project_id)
else:
nquery = query.session.query(ncfunc.label("nodes"))
clusters = query.count()
nodes = int(nquery.one()[0]) if nquery.one()[0] else 0
return clusters, nodes
def get_cluster_count_all(self, context, filters=None):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = self._add_clusters_filters(query, filters)
return query.count()
def destroy_cluster(self, cluster_id):
session = get_session()
with session.begin():
query = model_query(models.Cluster, session=session)
query = add_identity_filter(query, cluster_id)
try:
query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
query.delete()
def update_cluster(self, cluster_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Cluster.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_cluster(cluster_id, values)
def _do_update_cluster(self, cluster_id, values):
session = get_session()
with session.begin():
query = model_query(models.Cluster, session=session)
query = add_identity_filter(query, cluster_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
ref.update(values)
return ref
def _add_cluster_template_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["name", "image_id", "flavor_id",
"master_flavor_id", "keypair_id",
"external_network_id", "dns_nameserver",
"project_id", "user_id", "labels"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
return query.filter_by(**filter_dict)
    def get_cluster_template_list(self, context, filters=None, limit=None,
                                  marker=None, sort_key=None, sort_dir=None):
        """Return a page of ClusterTemplates visible to the caller.

        The tenant-scoped, filtered result set is unioned with all public
        templates before pagination.
        """
        query = model_query(models.ClusterTemplate)
        query = self._add_tenant_filters(context, query)
        query = self._add_cluster_template_filters(query, filters)
        # include public ClusterTemplates
        public_q = model_query(models.ClusterTemplate).filter_by(public=True)
        query = query.union(public_q)
        return _paginate_query(models.ClusterTemplate, limit, marker,
                               sort_key, sort_dir, query)
    def create_cluster_template(self, values):
        """Insert a new ClusterTemplate row built from *values*.

        A uuid is generated when the caller did not supply one.

        :raises: ClusterTemplateAlreadyExists on a duplicate uuid.
        """
        # ensure defaults are present for new ClusterTemplates
        if not values.get('uuid'):
            values['uuid'] = uuidutils.generate_uuid()
        cluster_template = models.ClusterTemplate()
        cluster_template.update(values)
        try:
            cluster_template.save()
        except db_exc.DBDuplicateEntry:
            raise exception.ClusterTemplateAlreadyExists(uuid=values['uuid'])
        return cluster_template
def get_cluster_template_by_id(self, context, cluster_template_id):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
query = query.filter_by(id=cluster_template_id)
try:
return query.one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_id)
def get_cluster_template_by_uuid(self, context, cluster_template_uuid):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
query = query.filter_by(uuid=cluster_template_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_uuid)
    def get_cluster_template_by_name(self, context, cluster_template_name):
        """Fetch a ClusterTemplate (tenant-visible or public) by name.

        :raises: Conflict when the name matches more than one template.
        :raises: ClusterTemplateNotFound when no template matches.
        """
        query = model_query(models.ClusterTemplate)
        query = self._add_tenant_filters(context, query)
        public_q = model_query(models.ClusterTemplate).filter_by(public=True)
        query = query.union(public_q)
        query = query.filter_by(name=cluster_template_name)
        try:
            return query.one()
        except MultipleResultsFound:
            raise exception.Conflict('Multiple ClusterTemplates exist with'
                                     ' same name. Please use the '
                                     'ClusterTemplate uuid instead.')
        except NoResultFound:
            raise exception.ClusterTemplateNotFound(
                clustertemplate=cluster_template_name)
    def _is_cluster_template_referenced(self, session, cluster_template_uuid):
        """Checks whether the ClusterTemplate is referenced by cluster(s)."""
        query = model_query(models.Cluster, session=session)
        query = self._add_clusters_filters(query, {'cluster_template_id':
                                                   cluster_template_uuid})
        # A non-zero count means at least one cluster still uses it.
        return query.count() != 0
def _is_publishing_cluster_template(self, values):
if (len(values) == 1 and
'public' in values and values['public'] is True):
return True
return False
    def destroy_cluster_template(self, cluster_template_id):
        """Delete a ClusterTemplate unless a cluster still references it.

        :raises: ClusterTemplateNotFound if the template does not exist.
        :raises: ClusterTemplateReferenced if any cluster still uses it.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.ClusterTemplate, session=session)
            query = add_identity_filter(query, cluster_template_id)
            try:
                cluster_template_ref = query.one()
            except NoResultFound:
                raise exception.ClusterTemplateNotFound(
                    clustertemplate=cluster_template_id)
            # Refuse to delete while clusters still point at this template.
            if self._is_cluster_template_referenced(
                    session, cluster_template_ref['uuid']):
                raise exception.ClusterTemplateReferenced(
                    clustertemplate=cluster_template_id)
            query.delete()
def update_cluster_template(self, cluster_template_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing ClusterTemplate.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_cluster_template(cluster_template_id, values)
    def _do_update_cluster_template(self, cluster_template_id, values):
        """Update a ClusterTemplate row under a row lock.

        A referenced template may only be updated when the change is
        exactly "publish" ({'public': True}); any other mutation of a
        referenced template is rejected.

        :raises: ClusterTemplateNotFound if the template does not exist.
        :raises: ClusterTemplateReferenced for disallowed updates.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.ClusterTemplate, session=session)
            query = add_identity_filter(query, cluster_template_id)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.ClusterTemplateNotFound(
                    clustertemplate=cluster_template_id)
            if self._is_cluster_template_referenced(session, ref['uuid']):
                # we only allow to update ClusterTemplate to be public
                if not self._is_publishing_cluster_template(values):
                    raise exception.ClusterTemplateReferenced(
                        clustertemplate=cluster_template_id)
            ref.update(values)
        return ref
    def create_x509keypair(self, values):
        """Insert a new X509KeyPair row, generating a uuid if absent.

        :raises: X509KeyPairAlreadyExists on a duplicate uuid.
        """
        # ensure defaults are present for new x509keypairs
        if not values.get('uuid'):
            values['uuid'] = uuidutils.generate_uuid()
        x509keypair = models.X509KeyPair()
        x509keypair.update(values)
        try:
            x509keypair.save()
        except db_exc.DBDuplicateEntry:
            raise exception.X509KeyPairAlreadyExists(uuid=values['uuid'])
        return x509keypair
def get_x509keypair_by_id(self, context, x509keypair_id):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=x509keypair_id)
try:
return query.one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
    def get_x509keypair_by_uuid(self, context, x509keypair_uuid):
        """Look up an X509KeyPair by uuid within the tenant scope.

        :raises: X509KeyPairNotFound if no row matches.
        """
        query = model_query(models.X509KeyPair)
        query = self._add_tenant_filters(context, query)
        query = query.filter_by(uuid=x509keypair_uuid)
        try:
            return query.one()
        except NoResultFound:
            raise exception.X509KeyPairNotFound(x509keypair=x509keypair_uuid)
def destroy_x509keypair(self, x509keypair_id):
session = get_session()
with session.begin():
query = model_query(models.X509KeyPair, session=session)
query = add_identity_filter(query, x509keypair_id)
count = query.delete()
if count != 1:
raise exception.X509KeyPairNotFound(x509keypair_id)
def update_x509keypair(self, x509keypair_id, values):
# NOTE(dtantsur): this can lead to very strange errors
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing X509KeyPair.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_x509keypair(x509keypair_id, values)
    def _do_update_x509keypair(self, x509keypair_id, values):
        """Update an X509KeyPair row under a row lock.

        :raises: X509KeyPairNotFound if the row does not exist.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.X509KeyPair, session=session)
            query = add_identity_filter(query, x509keypair_id)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
            ref.update(values)
        return ref
def _add_x509keypairs_filters(self, query, filters):
if filters is None:
filters = {}
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
return query
def get_x509keypair_list(self, context, filters=None, limit=None,
marker=None, sort_key=None, sort_dir=None):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = self._add_x509keypairs_filters(query, filters)
return _paginate_query(models.X509KeyPair, limit, marker,
sort_key, sort_dir, query)
    def destroy_magnum_service(self, magnum_service_id):
        """Delete a MagnumService row.

        :raises: MagnumServiceNotFound when nothing was deleted.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.MagnumService, session=session)
            query = add_identity_filter(query, magnum_service_id)
            # delete() reports how many rows went away; zero means the
            # identifier was stale.
            count = query.delete()
            if count != 1:
                raise exception.MagnumServiceNotFound(
                    magnum_service_id=magnum_service_id)
    def update_magnum_service(self, magnum_service_id, values):
        """Update a MagnumService row under a row lock.

        A growing report_count is treated as a fresh heartbeat and bumps
        last_seen_up to now.

        :raises: MagnumServiceNotFound if the service row is missing.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.MagnumService, session=session)
            query = add_identity_filter(query, magnum_service_id)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.MagnumServiceNotFound(
                    magnum_service_id=magnum_service_id)
            if 'report_count' in values:
                if values['report_count'] > ref.report_count:
                    ref.last_seen_up = timeutils.utcnow()
            ref.update(values)
        return ref
def get_magnum_service_by_host_and_binary(self, host, binary):
query = model_query(models.MagnumService)
query = query.filter_by(host=host, binary=binary)
try:
return query.one()
except NoResultFound:
return None
    def create_magnum_service(self, values):
        """Insert a MagnumService row; on duplicate return the existing row.

        A DBDuplicateEntry here means a row for this host/binary pair was
        already saved (presumably a unique constraint on the pair — confirm
        against the model), so the existing row is fetched and returned
        instead of failing.
        """
        magnum_service = models.MagnumService()
        magnum_service.update(values)
        try:
            magnum_service.save()
        except db_exc.DBDuplicateEntry:
            host = values["host"]
            binary = values["binary"]
            LOG.warning("Magnum service with same host:%(host)s and"
                        " binary:%(binary)s had been saved into DB",
                        {'host': host, 'binary': binary})
            query = model_query(models.MagnumService)
            query = query.filter_by(host=host, binary=binary)
            return query.one()
        return magnum_service
    def get_magnum_service_list(self, disabled=None, limit=None,
                                marker=None, sort_key=None, sort_dir=None
                                ):
        """Return a page of MagnumService rows.

        NOTE(review): the filter is applied only when *disabled* is truthy,
        so ``disabled=False`` lists all services rather than only the
        enabled ones — confirm callers expect that.
        """
        query = model_query(models.MagnumService)
        if disabled:
            query = query.filter_by(disabled=disabled)
        return _paginate_query(models.MagnumService, limit, marker,
                               sort_key, sort_dir, query)
    def create_quota(self, values):
        """Insert a new Quota row.

        :raises: QuotaAlreadyExists when a quota for this
                 (project_id, resource) pair already exists.
        """
        quotas = models.Quota()
        quotas.update(values)
        try:
            quotas.save()
        except db_exc.DBDuplicateEntry:
            raise exception.QuotaAlreadyExists(project_id=values['project_id'],
                                               resource=values['resource'])
        return quotas
def _add_quota_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["resource", "project_id"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
query = query.filter_by(**filter_dict)
return query
def get_quota_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Quota)
query = self._add_quota_filters(query, filters)
return _paginate_query(models.Quota, limit, marker,
sort_key, sort_dir, query)
    def update_quota(self, project_id, values):
        """Update the quota row for (project_id, values['resource']).

        The row is locked before mutation.

        :raises: QuotaNotFound if no such quota exists.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.Quota, session=session)
            resource = values['resource']
            try:
                query = query.filter_by(project_id=project_id).filter_by(
                    resource=resource)
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                msg = (_('project_id %(project_id)s resource %(resource)s.') %
                       {'project_id': project_id, 'resource': resource})
                raise exception.QuotaNotFound(msg=msg)
            ref.update(values)
        return ref
def delete_quota(self, project_id, resource):
session = get_session()
with session.begin():
query = model_query(models.Quota, session=session)
try:
query.filter_by(project_id=project_id).filter_by(
resource=resource).one()
except NoResultFound:
msg = (_('project_id %(project_id)s resource %(resource)s.') %
{'project_id': project_id, 'resource': resource})
raise exception.QuotaNotFound(msg=msg)
query.delete()
def get_quota_by_id(self, context, quota_id):
query = model_query(models.Quota)
query = query.filter_by(id=quota_id)
try:
return query.one()
except NoResultFound:
msg = _('quota id %s .') % quota_id
raise exception.QuotaNotFound(msg=msg)
def quota_get_all_by_project_id(self, project_id):
query = model_query(models.Quota)
result = query.filter_by(project_id=project_id).all()
return result
    def get_quota_by_project_id_resource(self, project_id, resource):
        """Fetch the single Quota row for (project_id, resource).

        :raises: QuotaNotFound if no row matches.
        """
        query = model_query(models.Quota)
        query = query.filter_by(project_id=project_id).filter_by(
            resource=resource)
        try:
            return query.one()
        except NoResultFound:
            msg = (_('project_id %(project_id)s resource %(resource)s.') %
                   {'project_id': project_id, 'resource': resource})
            raise exception.QuotaNotFound(msg=msg)
    def _add_federation_filters(self, query, filters):
        """Apply the recognised federation filters to *query*.

        'status' is handled separately as an IN() match over a list of
        acceptable states.
        """
        if filters is None:
            filters = {}
        possible_filters = ["name", "project_id", "hostcluster_id",
                            "member_ids", "properties"]
        # TODO(clenimar): implement 'member_ids' filter as a contains query,
        # so we return all the federations that have the given clusters,
        # instead of all the federations that *only* have the exact given
        # clusters.
        filter_names = set(filters).intersection(possible_filters)
        filter_dict = {filter_name: filters[filter_name]
                       for filter_name in filter_names}
        query = query.filter_by(**filter_dict)
        if 'status' in filters:
            query = query.filter(
                models.Federation.status.in_(filters['status']))
        return query
def get_federation_by_id(self, context, federation_id):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=federation_id)
try:
return query.one()
except NoResultFound:
raise exception.FederationNotFound(federation=federation_id)
    def get_federation_by_uuid(self, context, federation_uuid):
        """Fetch a federation by uuid within the tenant scope.

        :raises: FederationNotFound if no row matches.
        """
        query = model_query(models.Federation)
        query = self._add_tenant_filters(context, query)
        query = query.filter_by(uuid=federation_uuid)
        try:
            return query.one()
        except NoResultFound:
            raise exception.FederationNotFound(federation=federation_uuid)
    def get_federation_by_name(self, context, federation_name):
        """Fetch a federation by name within the tenant scope.

        :raises: Conflict when the name matches more than one federation.
        :raises: FederationNotFound when no federation matches.
        """
        query = model_query(models.Federation)
        query = self._add_tenant_filters(context, query)
        query = query.filter_by(name=federation_name)
        try:
            return query.one()
        except MultipleResultsFound:
            raise exception.Conflict('Multiple federations exist with same '
                                     'name. Please use the federation uuid '
                                     'instead.')
        except NoResultFound:
            raise exception.FederationNotFound(federation=federation_name)
def get_federation_list(self, context, limit=None, marker=None,
sort_key=None, sort_dir=None, filters=None):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = self._add_federation_filters(query, filters)
return _paginate_query(models.Federation, limit, marker,
sort_key, sort_dir, query)
    def create_federation(self, values):
        """Insert a new Federation row, generating a uuid if absent.

        :raises: FederationAlreadyExists on a duplicate uuid.
        """
        if not values.get('uuid'):
            values['uuid'] = uuidutils.generate_uuid()
        federation = models.Federation()
        federation.update(values)
        try:
            federation.save()
        except db_exc.DBDuplicateEntry:
            raise exception.FederationAlreadyExists(uuid=values['uuid'])
        return federation
    def destroy_federation(self, federation_id):
        """Delete a federation identified by id or uuid.

        :raises: FederationNotFound if no matching federation exists.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.Federation, session=session)
            query = add_identity_filter(query, federation_id)
            # Probe for existence first so a missing row surfaces as a
            # domain error instead of a silent no-op delete.
            try:
                query.one()
            except NoResultFound:
                raise exception.FederationNotFound(federation=federation_id)
            query.delete()
def update_federation(self, federation_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Federation.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_federation(federation_id, values)
    def _do_update_federation(self, federation_id, values):
        """Update a federation row under a row lock.

        :raises: FederationNotFound if the federation does not exist.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.Federation, session=session)
            query = add_identity_filter(query, federation_id)
            try:
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.FederationNotFound(federation=federation_id)
            ref.update(values)
        return ref
| 38.654305
| 79
| 0.636273
|
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import sqlalchemy as sa
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import func
from magnum.common import clients
from magnum.common import context as request_context
from magnum.common import exception
import magnum.conf
from magnum.db import api
from magnum.db.sqlalchemy import models
from magnum.i18n import _
profiler_sqlalchemy = importutils.try_import('osprofiler.sqlalchemy')
CONF = magnum.conf.CONF
LOG = log.getLogger(__name__)
_FACADE = None
def _create_facade_lazily():
global _FACADE
if _FACADE is None:
_FACADE = db_session.EngineFacade.from_config(CONF)
if profiler_sqlalchemy:
if CONF.profiler.enabled and CONF.profiler.trace_sqlalchemy:
profiler_sqlalchemy.add_tracing(sa, _FACADE.get_engine(), "db")
return _FACADE
def get_engine():
facade = _create_facade_lazily()
return facade.get_engine()
def get_session(**kwargs):
facade = _create_facade_lazily()
return facade.get_session(**kwargs)
def get_backend():
return Connection()
def model_query(model, *args, **kwargs):
session = kwargs.get('session') or get_session()
query = session.query(model, *args)
return query
def add_identity_filter(query, value):
if strutils.is_int_like(value):
return query.filter_by(id=value)
elif uuidutils.is_uuid_like(value):
return query.filter_by(uuid=value)
else:
raise exception.InvalidIdentity(identity=value)
def _paginate_query(model, limit=None, marker=None, sort_key=None,
sort_dir=None, query=None):
if not query:
query = model_query(model)
sort_keys = ['id']
if sort_key and sort_key not in sort_keys:
sort_keys.insert(0, sort_key)
try:
query = db_utils.paginate_query(query, model, limit, sort_keys,
marker=marker, sort_dir=sort_dir)
except db_exc.InvalidSortKey:
raise exception.InvalidParameterValue(
_('The sort_key value "%(key)s" is an invalid field for sorting')
% {'key': sort_key})
return query.all()
class Connection(api.Connection):
def __init__(self):
pass
def _add_tenant_filters(self, context, query):
if context.is_admin and context.all_tenants:
return query
admin_context = request_context.make_admin_context(all_tenants=True)
osc = clients.OpenStackClients(admin_context)
kst = osc.keystone()
if context.project_id and context.domain_id != kst.trustee_domain_id:
query = query.filter_by(project_id=context.project_id)
# cluster's project_id to associate per-cluster trustee users who have
elif context.domain_id == kst.trustee_domain_id:
user_name = kst.client.users.get(context.user_id).name
user_project = user_name.split('_', 2)[1]
query = query.filter_by(project_id=user_project)
else:
query = query.filter_by(user_id=context.user_id)
return query
def _add_clusters_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["cluster_template_id", "name", "node_count",
"master_count", "stack_id", "api_address",
"node_addresses", "project_id", "user_id"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
query = query.filter_by(**filter_dict)
if 'status' in filters:
query = query.filter(models.Cluster.status.in_(filters['status']))
return query
def get_cluster_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = self._add_clusters_filters(query, filters)
return _paginate_query(models.Cluster, limit, marker,
sort_key, sort_dir, query)
def create_cluster(self, values):
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
cluster = models.Cluster()
cluster.update(values)
try:
cluster.save()
except db_exc.DBDuplicateEntry:
raise exception.ClusterAlreadyExists(uuid=values['uuid'])
return cluster
def get_cluster_by_id(self, context, cluster_id):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=cluster_id)
try:
return query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
def get_cluster_by_name(self, context, cluster_name):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=cluster_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple clusters exist with same name.'
' Please use the cluster uuid instead.')
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_name)
def get_cluster_by_uuid(self, context, cluster_uuid):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=cluster_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_uuid)
def get_cluster_stats(self, context, project_id=None):
query = model_query(models.Cluster)
node_count_col = models.Cluster.node_count
master_count_col = models.Cluster.master_count
ncfunc = func.sum(node_count_col + master_count_col)
if project_id:
query = query.filter_by(project_id=project_id)
nquery = query.session.query(ncfunc.label("nodes")).filter_by(
project_id=project_id)
else:
nquery = query.session.query(ncfunc.label("nodes"))
clusters = query.count()
nodes = int(nquery.one()[0]) if nquery.one()[0] else 0
return clusters, nodes
def get_cluster_count_all(self, context, filters=None):
query = model_query(models.Cluster)
query = self._add_tenant_filters(context, query)
query = self._add_clusters_filters(query, filters)
return query.count()
def destroy_cluster(self, cluster_id):
session = get_session()
with session.begin():
query = model_query(models.Cluster, session=session)
query = add_identity_filter(query, cluster_id)
try:
query.one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
query.delete()
def update_cluster(self, cluster_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Cluster.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_cluster(cluster_id, values)
def _do_update_cluster(self, cluster_id, values):
session = get_session()
with session.begin():
query = model_query(models.Cluster, session=session)
query = add_identity_filter(query, cluster_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ClusterNotFound(cluster=cluster_id)
ref.update(values)
return ref
def _add_cluster_template_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["name", "image_id", "flavor_id",
"master_flavor_id", "keypair_id",
"external_network_id", "dns_nameserver",
"project_id", "user_id", "labels"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
return query.filter_by(**filter_dict)
def get_cluster_template_list(self, context, filters=None, limit=None,
marker=None, sort_key=None, sort_dir=None):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
query = self._add_cluster_template_filters(query, filters)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
return _paginate_query(models.ClusterTemplate, limit, marker,
sort_key, sort_dir, query)
def create_cluster_template(self, values):
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
cluster_template = models.ClusterTemplate()
cluster_template.update(values)
try:
cluster_template.save()
except db_exc.DBDuplicateEntry:
raise exception.ClusterTemplateAlreadyExists(uuid=values['uuid'])
return cluster_template
def get_cluster_template_by_id(self, context, cluster_template_id):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
query = query.filter_by(id=cluster_template_id)
try:
return query.one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_id)
def get_cluster_template_by_uuid(self, context, cluster_template_uuid):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
query = query.filter_by(uuid=cluster_template_uuid)
try:
return query.one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_uuid)
def get_cluster_template_by_name(self, context, cluster_template_name):
query = model_query(models.ClusterTemplate)
query = self._add_tenant_filters(context, query)
public_q = model_query(models.ClusterTemplate).filter_by(public=True)
query = query.union(public_q)
query = query.filter_by(name=cluster_template_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple ClusterTemplates exist with'
' same name. Please use the '
'ClusterTemplate uuid instead.')
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_name)
def _is_cluster_template_referenced(self, session, cluster_template_uuid):
query = model_query(models.Cluster, session=session)
query = self._add_clusters_filters(query, {'cluster_template_id':
cluster_template_uuid})
return query.count() != 0
def _is_publishing_cluster_template(self, values):
if (len(values) == 1 and
'public' in values and values['public'] is True):
return True
return False
def destroy_cluster_template(self, cluster_template_id):
session = get_session()
with session.begin():
query = model_query(models.ClusterTemplate, session=session)
query = add_identity_filter(query, cluster_template_id)
try:
cluster_template_ref = query.one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_id)
if self._is_cluster_template_referenced(
session, cluster_template_ref['uuid']):
raise exception.ClusterTemplateReferenced(
clustertemplate=cluster_template_id)
query.delete()
def update_cluster_template(self, cluster_template_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing ClusterTemplate.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_cluster_template(cluster_template_id, values)
def _do_update_cluster_template(self, cluster_template_id, values):
session = get_session()
with session.begin():
query = model_query(models.ClusterTemplate, session=session)
query = add_identity_filter(query, cluster_template_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.ClusterTemplateNotFound(
clustertemplate=cluster_template_id)
if self._is_cluster_template_referenced(session, ref['uuid']):
if not self._is_publishing_cluster_template(values):
raise exception.ClusterTemplateReferenced(
clustertemplate=cluster_template_id)
ref.update(values)
return ref
def create_x509keypair(self, values):
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
x509keypair = models.X509KeyPair()
x509keypair.update(values)
try:
x509keypair.save()
except db_exc.DBDuplicateEntry:
raise exception.X509KeyPairAlreadyExists(uuid=values['uuid'])
return x509keypair
def get_x509keypair_by_id(self, context, x509keypair_id):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=x509keypair_id)
try:
return query.one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
def get_x509keypair_by_uuid(self, context, x509keypair_uuid):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=x509keypair_uuid)
try:
return query.one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_uuid)
def destroy_x509keypair(self, x509keypair_id):
session = get_session()
with session.begin():
query = model_query(models.X509KeyPair, session=session)
query = add_identity_filter(query, x509keypair_id)
count = query.delete()
if count != 1:
raise exception.X509KeyPairNotFound(x509keypair_id)
def update_x509keypair(self, x509keypair_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing X509KeyPair.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_x509keypair(x509keypair_id, values)
def _do_update_x509keypair(self, x509keypair_id, values):
session = get_session()
with session.begin():
query = model_query(models.X509KeyPair, session=session)
query = add_identity_filter(query, x509keypair_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.X509KeyPairNotFound(x509keypair=x509keypair_id)
ref.update(values)
return ref
def _add_x509keypairs_filters(self, query, filters):
if filters is None:
filters = {}
if 'project_id' in filters:
query = query.filter_by(project_id=filters['project_id'])
if 'user_id' in filters:
query = query.filter_by(user_id=filters['user_id'])
return query
def get_x509keypair_list(self, context, filters=None, limit=None,
marker=None, sort_key=None, sort_dir=None):
query = model_query(models.X509KeyPair)
query = self._add_tenant_filters(context, query)
query = self._add_x509keypairs_filters(query, filters)
return _paginate_query(models.X509KeyPair, limit, marker,
sort_key, sort_dir, query)
def destroy_magnum_service(self, magnum_service_id):
session = get_session()
with session.begin():
query = model_query(models.MagnumService, session=session)
query = add_identity_filter(query, magnum_service_id)
count = query.delete()
if count != 1:
raise exception.MagnumServiceNotFound(
magnum_service_id=magnum_service_id)
def update_magnum_service(self, magnum_service_id, values):
session = get_session()
with session.begin():
query = model_query(models.MagnumService, session=session)
query = add_identity_filter(query, magnum_service_id)
try:
ref = query.with_lockmode('update').one()
except NoResultFound:
raise exception.MagnumServiceNotFound(
magnum_service_id=magnum_service_id)
if 'report_count' in values:
if values['report_count'] > ref.report_count:
ref.last_seen_up = timeutils.utcnow()
ref.update(values)
return ref
def get_magnum_service_by_host_and_binary(self, host, binary):
query = model_query(models.MagnumService)
query = query.filter_by(host=host, binary=binary)
try:
return query.one()
except NoResultFound:
return None
def create_magnum_service(self, values):
magnum_service = models.MagnumService()
magnum_service.update(values)
try:
magnum_service.save()
except db_exc.DBDuplicateEntry:
host = values["host"]
binary = values["binary"]
LOG.warning("Magnum service with same host:%(host)s and"
" binary:%(binary)s had been saved into DB",
{'host': host, 'binary': binary})
query = model_query(models.MagnumService)
query = query.filter_by(host=host, binary=binary)
return query.one()
return magnum_service
def get_magnum_service_list(self, disabled=None, limit=None,
marker=None, sort_key=None, sort_dir=None
):
query = model_query(models.MagnumService)
if disabled:
query = query.filter_by(disabled=disabled)
return _paginate_query(models.MagnumService, limit, marker,
sort_key, sort_dir, query)
def create_quota(self, values):
quotas = models.Quota()
quotas.update(values)
try:
quotas.save()
except db_exc.DBDuplicateEntry:
raise exception.QuotaAlreadyExists(project_id=values['project_id'],
resource=values['resource'])
return quotas
def _add_quota_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["resource", "project_id"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
query = query.filter_by(**filter_dict)
return query
def get_quota_list(self, context, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Quota)
query = self._add_quota_filters(query, filters)
return _paginate_query(models.Quota, limit, marker,
sort_key, sort_dir, query)
def update_quota(self, project_id, values):
session = get_session()
with session.begin():
query = model_query(models.Quota, session=session)
resource = values['resource']
try:
query = query.filter_by(project_id=project_id).filter_by(
resource=resource)
ref = query.with_lockmode('update').one()
except NoResultFound:
msg = (_('project_id %(project_id)s resource %(resource)s.') %
{'project_id': project_id, 'resource': resource})
raise exception.QuotaNotFound(msg=msg)
ref.update(values)
return ref
def delete_quota(self, project_id, resource):
session = get_session()
with session.begin():
query = model_query(models.Quota, session=session)
try:
query.filter_by(project_id=project_id).filter_by(
resource=resource).one()
except NoResultFound:
msg = (_('project_id %(project_id)s resource %(resource)s.') %
{'project_id': project_id, 'resource': resource})
raise exception.QuotaNotFound(msg=msg)
query.delete()
def get_quota_by_id(self, context, quota_id):
query = model_query(models.Quota)
query = query.filter_by(id=quota_id)
try:
return query.one()
except NoResultFound:
msg = _('quota id %s .') % quota_id
raise exception.QuotaNotFound(msg=msg)
def quota_get_all_by_project_id(self, project_id):
query = model_query(models.Quota)
result = query.filter_by(project_id=project_id).all()
return result
def get_quota_by_project_id_resource(self, project_id, resource):
query = model_query(models.Quota)
query = query.filter_by(project_id=project_id).filter_by(
resource=resource)
try:
return query.one()
except NoResultFound:
msg = (_('project_id %(project_id)s resource %(resource)s.') %
{'project_id': project_id, 'resource': resource})
raise exception.QuotaNotFound(msg=msg)
def _add_federation_filters(self, query, filters):
if filters is None:
filters = {}
possible_filters = ["name", "project_id", "hostcluster_id",
"member_ids", "properties"]
filter_names = set(filters).intersection(possible_filters)
filter_dict = {filter_name: filters[filter_name]
for filter_name in filter_names}
query = query.filter_by(**filter_dict)
if 'status' in filters:
query = query.filter(
models.Federation.status.in_(filters['status']))
return query
def get_federation_by_id(self, context, federation_id):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = query.filter_by(id=federation_id)
try:
return query.one()
except NoResultFound:
raise exception.FederationNotFound(federation=federation_id)
def get_federation_by_uuid(self, context, federation_uuid):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = query.filter_by(uuid=federation_uuid)
try:
return query.one()
except NoResultFound:
raise exception.FederationNotFound(federation=federation_uuid)
def get_federation_by_name(self, context, federation_name):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = query.filter_by(name=federation_name)
try:
return query.one()
except MultipleResultsFound:
raise exception.Conflict('Multiple federations exist with same '
'name. Please use the federation uuid '
'instead.')
except NoResultFound:
raise exception.FederationNotFound(federation=federation_name)
def get_federation_list(self, context, limit=None, marker=None,
sort_key=None, sort_dir=None, filters=None):
query = model_query(models.Federation)
query = self._add_tenant_filters(context, query)
query = self._add_federation_filters(query, filters)
return _paginate_query(models.Federation, limit, marker,
sort_key, sort_dir, query)
def create_federation(self, values):
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
federation = models.Federation()
federation.update(values)
try:
federation.save()
except db_exc.DBDuplicateEntry:
raise exception.FederationAlreadyExists(uuid=values['uuid'])
return federation
def destroy_federation(self, federation_id):
session = get_session()
with session.begin():
query = model_query(models.Federation, session=session)
query = add_identity_filter(query, federation_id)
try:
query.one()
except NoResultFound:
raise exception.FederationNotFound(federation=federation_id)
query.delete()
def update_federation(self, federation_id, values):
if 'uuid' in values:
msg = _("Cannot overwrite UUID for an existing Federation.")
raise exception.InvalidParameterValue(err=msg)
return self._do_update_federation(federation_id, values)
    def _do_update_federation(self, federation_id, values):
        """Apply *values* to the federation matching *federation_id*.

        :returns: the updated Federation ORM object.
        :raises: exception.FederationNotFound if no matching row exists.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.Federation, session=session)
            query = add_identity_filter(query, federation_id)
            try:
                # Row-level lock so concurrent updaters serialize on this row.
                # NOTE(review): Query.with_lockmode() is deprecated in newer
                # SQLAlchemy in favour of with_for_update() — confirm the
                # pinned SQLAlchemy version before upgrading.
                ref = query.with_lockmode('update').one()
            except NoResultFound:
                raise exception.FederationNotFound(federation=federation_id)
            ref.update(values)
        return ref
| true
| true
|
1c4823f0ebe1ed95a32f25d097316665a3c42efe
| 11,938
|
py
|
Python
|
official/nlp/modeling/networks/bert_encoder.py
|
62theories/tf-flask
|
c6954f0f3c4082165c92c77bb06d2fec6e75a8c4
|
[
"Apache-2.0"
] | 82,518
|
2016-02-05T12:07:23.000Z
|
2022-03-31T23:09:47.000Z
|
official/nlp/modeling/networks/bert_encoder.py
|
62theories/tf-flask
|
c6954f0f3c4082165c92c77bb06d2fec6e75a8c4
|
[
"Apache-2.0"
] | 9,021
|
2016-03-08T01:02:05.000Z
|
2022-03-31T08:06:35.000Z
|
official/nlp/modeling/networks/bert_encoder.py
|
62theories/tf-flask
|
c6954f0f3c4082165c92c77bb06d2fec6e75a8c4
|
[
"Apache-2.0"
] | 54,341
|
2016-02-06T17:19:55.000Z
|
2022-03-31T10:27:44.000Z
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Transformer-based BERT encoder network."""
# pylint: disable=g-classes-have-attributes
from absl import logging
import tensorflow as tf
from official.nlp.modeling import layers
@tf.keras.utils.register_keras_serializable(package='Text')
class BertEncoder(tf.keras.Model):
  """Bi-directional Transformer-based encoder network.

  This network implements a bi-directional Transformer-based encoder as
  described in "BERT: Pre-training of Deep Bidirectional Transformers for
  Language Understanding" (https://arxiv.org/abs/1810.04805). It includes the
  embedding lookups and transformer layers, but not the masked language model
  or classification task networks.

  The default values for this object are taken from the BERT-Base implementation
  in "BERT: Pre-training of Deep Bidirectional Transformers for Language
  Understanding".

  *Note* that the network is constructed by
  [Keras Functional API](https://keras.io/guides/functional_api/).

  Args:
    vocab_size: The size of the token vocabulary.
    hidden_size: The size of the transformer hidden layers.
    num_layers: The number of transformer layers.
    num_attention_heads: The number of attention heads for each transformer. The
      hidden size must be divisible by the number of attention heads.
    max_sequence_length: The maximum sequence length that this encoder can
      consume. If None, max_sequence_length uses the value from sequence length.
      This determines the variable shape for positional embeddings.
    type_vocab_size: The number of types that the 'type_ids' input can take.
    inner_dim: The output dimension of the first Dense layer in a two-layer
      feedforward network for each transformer.
    inner_activation: The activation for the first Dense layer in a two-layer
      feedforward network for each transformer.
    output_dropout: Dropout probability for the post-attention and output
      dropout.
    attention_dropout: The dropout rate to use for the attention layers
      within the transformer layers.
    initializer: The initialzer to use for all weights in this encoder.
    output_range: The sequence output range, [0, output_range), by slicing the
      target sequence of the last transformer layer. `None` means the entire
      target sequence will attend to the source sequence, which yields the full
      output.
    embedding_width: The width of the word embeddings. If the embedding width is
      not equal to hidden size, embedding parameters will be factorized into two
      matrices in the shape of ['vocab_size', 'embedding_width'] and
      ['embedding_width', 'hidden_size'] ('embedding_width' is usually much
      smaller than 'hidden_size').
    embedding_layer: An optional Layer instance which will be called to
      generate embeddings for the input word IDs.
    norm_first: Whether to normalize inputs to attention and intermediate
      dense layers. If set False, output of attention and intermediate dense
      layers is normalized.
    dict_outputs: Whether to use a dictionary as the model outputs.
    return_all_encoder_outputs: Whether to output sequence embedding outputs of
      all encoder transformer layers. Note: when the following `dict_outputs`
      argument is True, all encoder outputs are always returned in the dict,
      keyed by `encoder_outputs`.
  """

  def __init__(
      self,
      vocab_size,
      hidden_size=768,
      num_layers=12,
      num_attention_heads=12,
      max_sequence_length=512,
      type_vocab_size=16,
      inner_dim=3072,
      inner_activation=lambda x: tf.keras.activations.gelu(x, approximate=True),
      output_dropout=0.1,
      attention_dropout=0.1,
      initializer=tf.keras.initializers.TruncatedNormal(stddev=0.02),
      output_range=None,
      embedding_width=None,
      embedding_layer=None,
      norm_first=False,
      dict_outputs=False,
      return_all_encoder_outputs=False,
      **kwargs):
    if 'sequence_length' in kwargs:
      kwargs.pop('sequence_length')
      logging.warning('`sequence_length` is a deprecated argument to '
                      '`BertEncoder`, which has no effect for a while. Please '
                      'remove `sequence_length` argument.')

    # Handles backward compatible kwargs.
    if 'intermediate_size' in kwargs:
      inner_dim = kwargs.pop('intermediate_size')
    if 'activation' in kwargs:
      inner_activation = kwargs.pop('activation')
    if 'dropout_rate' in kwargs:
      output_dropout = kwargs.pop('dropout_rate')
    if 'attention_dropout_rate' in kwargs:
      attention_dropout = kwargs.pop('attention_dropout_rate')

    activation = tf.keras.activations.get(inner_activation)
    initializer = tf.keras.initializers.get(initializer)

    word_ids = tf.keras.layers.Input(
        shape=(None,), dtype=tf.int32, name='input_word_ids')
    mask = tf.keras.layers.Input(
        shape=(None,), dtype=tf.int32, name='input_mask')
    type_ids = tf.keras.layers.Input(
        shape=(None,), dtype=tf.int32, name='input_type_ids')

    if embedding_width is None:
      embedding_width = hidden_size

    if embedding_layer is None:
      embedding_layer_inst = layers.OnDeviceEmbedding(
          vocab_size=vocab_size,
          embedding_width=embedding_width,
          initializer=initializer,
          name='word_embeddings')
    else:
      embedding_layer_inst = embedding_layer
    word_embeddings = embedding_layer_inst(word_ids)

    # Always uses dynamic slicing for simplicity.
    position_embedding_layer = layers.PositionEmbedding(
        initializer=initializer,
        max_length=max_sequence_length,
        name='position_embedding')
    position_embeddings = position_embedding_layer(word_embeddings)
    type_embedding_layer = layers.OnDeviceEmbedding(
        vocab_size=type_vocab_size,
        embedding_width=embedding_width,
        initializer=initializer,
        use_one_hot=True,
        name='type_embeddings')
    type_embeddings = type_embedding_layer(type_ids)

    embeddings = tf.keras.layers.Add()(
        [word_embeddings, position_embeddings, type_embeddings])

    embedding_norm_layer = tf.keras.layers.LayerNormalization(
        name='embeddings/layer_norm', axis=-1, epsilon=1e-12, dtype=tf.float32)

    embeddings = embedding_norm_layer(embeddings)
    embeddings = (tf.keras.layers.Dropout(rate=output_dropout)(embeddings))

    # We project the 'embedding' output to 'hidden_size' if it is not already
    # 'hidden_size'.
    if embedding_width != hidden_size:
      embedding_projection = tf.keras.layers.experimental.EinsumDense(
          '...x,xy->...y',
          output_shape=hidden_size,
          bias_axes='y',
          kernel_initializer=initializer,
          name='embedding_projection')
      embeddings = embedding_projection(embeddings)
    else:
      embedding_projection = None

    transformer_layers = []
    data = embeddings
    attention_mask = layers.SelfAttentionMask()(data, mask)
    encoder_outputs = []
    for i in range(num_layers):
      if i == num_layers - 1 and output_range is not None:
        transformer_output_range = output_range
      else:
        transformer_output_range = None
      layer = layers.TransformerEncoderBlock(
          num_attention_heads=num_attention_heads,
          inner_dim=inner_dim,
          inner_activation=inner_activation,
          output_dropout=output_dropout,
          attention_dropout=attention_dropout,
          norm_first=norm_first,
          output_range=transformer_output_range,
          kernel_initializer=initializer,
          name='transformer/layer_%d' % i)
      transformer_layers.append(layer)
      data = layer([data, attention_mask])
      encoder_outputs.append(data)

    last_encoder_output = encoder_outputs[-1]
    # Applying a tf.slice op (through subscript notation) to a Keras tensor
    # like this will create a SliceOpLambda layer. This is better than a Lambda
    # layer with Python code, because that is fundamentally less portable.
    first_token_tensor = last_encoder_output[:, 0, :]
    pooler_layer = tf.keras.layers.Dense(
        units=hidden_size,
        activation='tanh',
        kernel_initializer=initializer,
        name='pooler_transform')
    cls_output = pooler_layer(first_token_tensor)

    outputs = dict(
        sequence_output=encoder_outputs[-1],
        pooled_output=cls_output,
        encoder_outputs=encoder_outputs,
    )

    if dict_outputs:
      super().__init__(
          inputs=[word_ids, mask, type_ids], outputs=outputs, **kwargs)
    else:
      cls_output = outputs['pooled_output']
      if return_all_encoder_outputs:
        encoder_outputs = outputs['encoder_outputs']
        outputs = [encoder_outputs, cls_output]
      else:
        sequence_output = outputs['sequence_output']
        outputs = [sequence_output, cls_output]
      super().__init__(  # pylint: disable=bad-super-call
          inputs=[word_ids, mask, type_ids],
          outputs=outputs,
          **kwargs)

    self._pooler_layer = pooler_layer
    self._transformer_layers = transformer_layers
    self._embedding_norm_layer = embedding_norm_layer
    self._embedding_layer = embedding_layer_inst
    self._position_embedding_layer = position_embedding_layer
    self._type_embedding_layer = type_embedding_layer
    if embedding_projection is not None:
      self._embedding_projection = embedding_projection

    config_dict = {
        'vocab_size': vocab_size,
        'hidden_size': hidden_size,
        'num_layers': num_layers,
        'num_attention_heads': num_attention_heads,
        'max_sequence_length': max_sequence_length,
        'type_vocab_size': type_vocab_size,
        'inner_dim': inner_dim,
        'inner_activation': tf.keras.activations.serialize(activation),
        'output_dropout': output_dropout,
        'attention_dropout': attention_dropout,
        'initializer': tf.keras.initializers.serialize(initializer),
        'output_range': output_range,
        'embedding_width': embedding_width,
        'embedding_layer': embedding_layer,
        'norm_first': norm_first,
        'dict_outputs': dict_outputs,
    }

    # We are storing the config dict as a namedtuple here to ensure checkpoint
    # compatibility with an earlier version of this model which did not track
    # the config dict attribute. TF does not track immutable attrs which
    # do not contain Trackables, so by creating a config namedtuple instead of
    # a dict we avoid tracking it.
    # pylint: disable=protected-access
    self._setattr_tracking = False
    self._config = config_dict
    self._setattr_tracking = True
    # pylint: enable=protected-access

  def get_embedding_table(self):
    """Return the word-embedding variable (for tied output projections)."""
    return self._embedding_layer.embeddings

  def get_embedding_layer(self):
    """Return the Layer instance that produces the word embeddings."""
    return self._embedding_layer

  def get_config(self):
    return self._config

  @property
  def transformer_layers(self):
    """List of Transformer layers in the encoder."""
    return self._transformer_layers

  @property
  def pooler_layer(self):
    """The pooler dense layer after the transformer layers."""
    return self._pooler_layer

  @classmethod
  def from_config(cls, config, custom_objects=None):
    if 'embedding_layer' in config and config['embedding_layer'] is not None:
      warn_string = (
          'You are reloading a model that was saved with a '
          'potentially-shared embedding layer object. If you continue to '
          'train this model, the embedding layer will no longer be shared. '
          'To work around this, load the model outside of the Keras API.')
      print('WARNING: ' + warn_string)
      # logging.warn is a deprecated alias; use the canonical spelling.
      logging.warning(warn_string)
    return cls(**config)
| 39.793333
| 80
| 0.714609
|
from absl import logging
import tensorflow as tf
from official.nlp.modeling import layers
@tf.keras.utils.register_keras_serializable(package='Text')
class BertEncoder(tf.keras.Model):
def __init__(
self,
vocab_size,
hidden_size=768,
num_layers=12,
num_attention_heads=12,
max_sequence_length=512,
type_vocab_size=16,
inner_dim=3072,
inner_activation=lambda x: tf.keras.activations.gelu(x, approximate=True),
output_dropout=0.1,
attention_dropout=0.1,
initializer=tf.keras.initializers.TruncatedNormal(stddev=0.02),
output_range=None,
embedding_width=None,
embedding_layer=None,
norm_first=False,
dict_outputs=False,
return_all_encoder_outputs=False,
**kwargs):
if 'sequence_length' in kwargs:
kwargs.pop('sequence_length')
logging.warning('`sequence_length` is a deprecated argument to '
'`BertEncoder`, which has no effect for a while. Please '
'remove `sequence_length` argument.')
if 'intermediate_size' in kwargs:
inner_dim = kwargs.pop('intermediate_size')
if 'activation' in kwargs:
inner_activation = kwargs.pop('activation')
if 'dropout_rate' in kwargs:
output_dropout = kwargs.pop('dropout_rate')
if 'attention_dropout_rate' in kwargs:
attention_dropout = kwargs.pop('attention_dropout_rate')
activation = tf.keras.activations.get(inner_activation)
initializer = tf.keras.initializers.get(initializer)
word_ids = tf.keras.layers.Input(
shape=(None,), dtype=tf.int32, name='input_word_ids')
mask = tf.keras.layers.Input(
shape=(None,), dtype=tf.int32, name='input_mask')
type_ids = tf.keras.layers.Input(
shape=(None,), dtype=tf.int32, name='input_type_ids')
if embedding_width is None:
embedding_width = hidden_size
if embedding_layer is None:
embedding_layer_inst = layers.OnDeviceEmbedding(
vocab_size=vocab_size,
embedding_width=embedding_width,
initializer=initializer,
name='word_embeddings')
else:
embedding_layer_inst = embedding_layer
word_embeddings = embedding_layer_inst(word_ids)
position_embedding_layer = layers.PositionEmbedding(
initializer=initializer,
max_length=max_sequence_length,
name='position_embedding')
position_embeddings = position_embedding_layer(word_embeddings)
type_embedding_layer = layers.OnDeviceEmbedding(
vocab_size=type_vocab_size,
embedding_width=embedding_width,
initializer=initializer,
use_one_hot=True,
name='type_embeddings')
type_embeddings = type_embedding_layer(type_ids)
embeddings = tf.keras.layers.Add()(
[word_embeddings, position_embeddings, type_embeddings])
embedding_norm_layer = tf.keras.layers.LayerNormalization(
name='embeddings/layer_norm', axis=-1, epsilon=1e-12, dtype=tf.float32)
embeddings = embedding_norm_layer(embeddings)
embeddings = (tf.keras.layers.Dropout(rate=output_dropout)(embeddings))
if embedding_width != hidden_size:
embedding_projection = tf.keras.layers.experimental.EinsumDense(
'...x,xy->...y',
output_shape=hidden_size,
bias_axes='y',
kernel_initializer=initializer,
name='embedding_projection')
embeddings = embedding_projection(embeddings)
else:
embedding_projection = None
transformer_layers = []
data = embeddings
attention_mask = layers.SelfAttentionMask()(data, mask)
encoder_outputs = []
for i in range(num_layers):
if i == num_layers - 1 and output_range is not None:
transformer_output_range = output_range
else:
transformer_output_range = None
layer = layers.TransformerEncoderBlock(
num_attention_heads=num_attention_heads,
inner_dim=inner_dim,
inner_activation=inner_activation,
output_dropout=output_dropout,
attention_dropout=attention_dropout,
norm_first=norm_first,
output_range=transformer_output_range,
kernel_initializer=initializer,
name='transformer/layer_%d' % i)
transformer_layers.append(layer)
data = layer([data, attention_mask])
encoder_outputs.append(data)
last_encoder_output = encoder_outputs[-1]
first_token_tensor = last_encoder_output[:, 0, :]
pooler_layer = tf.keras.layers.Dense(
units=hidden_size,
activation='tanh',
kernel_initializer=initializer,
name='pooler_transform')
cls_output = pooler_layer(first_token_tensor)
outputs = dict(
sequence_output=encoder_outputs[-1],
pooled_output=cls_output,
encoder_outputs=encoder_outputs,
)
if dict_outputs:
super().__init__(
inputs=[word_ids, mask, type_ids], outputs=outputs, **kwargs)
else:
cls_output = outputs['pooled_output']
if return_all_encoder_outputs:
encoder_outputs = outputs['encoder_outputs']
outputs = [encoder_outputs, cls_output]
else:
sequence_output = outputs['sequence_output']
outputs = [sequence_output, cls_output]
super().__init__(
inputs=[word_ids, mask, type_ids],
outputs=outputs,
**kwargs)
self._pooler_layer = pooler_layer
self._transformer_layers = transformer_layers
self._embedding_norm_layer = embedding_norm_layer
self._embedding_layer = embedding_layer_inst
self._position_embedding_layer = position_embedding_layer
self._type_embedding_layer = type_embedding_layer
if embedding_projection is not None:
self._embedding_projection = embedding_projection
config_dict = {
'vocab_size': vocab_size,
'hidden_size': hidden_size,
'num_layers': num_layers,
'num_attention_heads': num_attention_heads,
'max_sequence_length': max_sequence_length,
'type_vocab_size': type_vocab_size,
'inner_dim': inner_dim,
'inner_activation': tf.keras.activations.serialize(activation),
'output_dropout': output_dropout,
'attention_dropout': attention_dropout,
'initializer': tf.keras.initializers.serialize(initializer),
'output_range': output_range,
'embedding_width': embedding_width,
'embedding_layer': embedding_layer,
'norm_first': norm_first,
'dict_outputs': dict_outputs,
}
self._setattr_tracking = False
self._config = config_dict
self._setattr_tracking = True
def get_embedding_table(self):
return self._embedding_layer.embeddings
def get_embedding_layer(self):
return self._embedding_layer
def get_config(self):
return self._config
@property
def transformer_layers(self):
return self._transformer_layers
@property
def pooler_layer(self):
return self._pooler_layer
@classmethod
def from_config(cls, config, custom_objects=None):
if 'embedding_layer' in config and config['embedding_layer'] is not None:
warn_string = (
'You are reloading a model that was saved with a '
'potentially-shared embedding layer object. If you contine to '
'train this model, the embedding layer will no longer be shared. '
'To work around this, load the model outside of the Keras API.')
print('WARNING: ' + warn_string)
logging.warn(warn_string)
return cls(**config)
| true
| true
|
1c4824ef58155a24d3da7e3337113da8189f354b
| 1,975
|
py
|
Python
|
day03/main.py
|
thetwoj/advent-of-code-2021
|
87a918e1f8973e3a9e5238248043ec27338939de
|
[
"MIT"
] | null | null | null |
day03/main.py
|
thetwoj/advent-of-code-2021
|
87a918e1f8973e3a9e5238248043ec27338939de
|
[
"MIT"
] | null | null | null |
day03/main.py
|
thetwoj/advent-of-code-2021
|
87a918e1f8973e3a9e5238248043ec27338939de
|
[
"MIT"
] | null | null | null |
def get_input(filename):
    """Read *filename* and return its lines with surrounding whitespace stripped."""
    with open(filename, 'r') as handle:
        return [line.strip() for line in handle]
def find_gamma_epsilon(report):
    """Return (gamma, epsilon) rates for an AoC 2021 day-3 diagnostic report.

    Gamma takes the most common bit of each column (ties yield '1');
    epsilon is gamma's bitwise complement over the same width.
    """
    binary_gamma = ""
    # zip(*report) walks the report column by column.
    for column in zip(*report):
        zeros = column.count("0")
        ones = column.count("1")
        binary_gamma += "0" if zeros > ones else "1"
    binary_epsilon = "".join("1" if bit == "0" else "0"
                             for bit in binary_gamma)
    return int(binary_gamma, 2), int(binary_epsilon, 2)
def find_oxygen(report):
    """Return the oxygen generator rating from the diagnostic *report*.

    At each bit position keep the candidates carrying the most common bit
    (ties favour '1') until a single candidate remains.
    """
    for pos in range(len(report[0])):
        buckets = {"0": [], "1": []}
        for line in report:
            buckets[line[pos]].append(line)
        if len(buckets["0"]) > len(buckets["1"]):
            report = buckets["0"]
        else:
            report = buckets["1"]
        if len(report) == 1:
            return int(report[0], 2)
    # Previously the function fell off the end and returned None when the
    # surviving candidates were identical duplicates; any one of them is
    # the rating.
    return int(report[0], 2)
def find_co2(report):
    """Return the CO2 scrubber rating from the diagnostic *report*.

    At each bit position keep the candidates carrying the least common bit
    (ties favour '0') until a single candidate remains.
    """
    for pos in range(len(report[0])):
        buckets = {"0": [], "1": []}
        for line in report:
            buckets[line[pos]].append(line)
        if len(buckets["0"]) <= len(buckets["1"]):
            chosen = buckets["0"]
        else:
            chosen = buckets["1"]
        if not chosen:
            # Every remaining candidate shares the same bit here; keeping the
            # empty "least common" bucket would discard all candidates (the
            # previous code then returned None). Keep them all instead.
            chosen = buckets["0"] or buckets["1"]
        report = chosen
        if len(report) == 1:
            return int(report[0], 2)
    # Surviving candidates are identical duplicates; any one is the rating.
    return int(report[0], 2)
def main():
    """Read the diagnostic report from ./input and print both puzzle answers."""
    report = get_input("input")
    print("Part 1:")
    gamma, epsilon = find_gamma_epsilon(report)
    print(gamma * epsilon)
    print()
    print("Part 2:")
    print(find_oxygen(report) * find_co2(report))


if __name__ == "__main__":
    main()
| 25.320513
| 59
| 0.517975
|
def get_input(filename):
data = []
with open(filename, 'r') as i:
for x in i.readlines():
data.append(x.strip())
return data
def find_gamma_epsilon(report):
digit_counts = {}
binary_gamma = ""
for line in report:
for index, letter in enumerate(line):
if index in digit_counts:
digit_counts[index][letter] += 1
else:
digit_counts[index] = {"0": 0, "1": 0}
digit_counts[index][letter] += 1
for key in digit_counts:
if digit_counts[key]["0"] > digit_counts[key]["1"]:
binary_gamma += "0"
else:
binary_gamma += "1"
binary_epsilon = ""
for digit in binary_gamma:
if digit == "0":
binary_epsilon += "1"
else:
binary_epsilon += "0"
gamma = int(binary_gamma, 2)
epsilon = int(binary_epsilon, 2)
return gamma, epsilon
def find_oxygen(report):
for x in range(len(report[0])):
tracker = {"0": [], "1": []}
for line in report:
tracker[line[x]].append(line)
if len(tracker["0"]) > len(tracker["1"]):
report = tracker["0"]
else:
report = tracker["1"]
if len(report) == 1:
return int(report[0], 2)
def find_co2(report):
for x in range(len(report[0])):
tracker = {"0": [], "1": []}
for line in report:
tracker[line[x]].append(line)
if len(tracker["0"]) <= len(tracker["1"]):
report = tracker["0"]
else:
report = tracker["1"]
if len(report) == 1:
return int(report[0], 2)
def main():
report = get_input("input")
print("Part 1:")
gamma, epsilon = find_gamma_epsilon(report)
print(gamma * epsilon)
print()
print("Part 2:")
oxygen = find_oxygen(report)
co2 = find_co2(report)
print(oxygen * co2)
if __name__ == "__main__":
main()
| true
| true
|
1c4825ac328df0f6c85d36f015aea32324491bf9
| 2,660
|
py
|
Python
|
pysnmp/MWORKS-E-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/MWORKS-E-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/MWORKS-E-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module MWORKS-E-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MWORKS-E-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:06:06 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Machine-generated pysnmp MIB module for MWORKS-E-MIB. `mibBuilder` is not
# defined here: it is injected into the module namespace by the pysnmp MIB
# loader when this file is exec'd — presumably via MibBuilder.loadModules();
# TODO confirm against the pysnmp loader in use.

# Pull base SMI/TC symbols out of the already-loaded foundation modules.
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, Integer32, enterprises, Bits, Unsigned32, iso, Counter64, ModuleIdentity, IpAddress, Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Gauge32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Integer32", "enterprises", "Bits", "Unsigned32", "iso", "Counter64", "ModuleIdentity", "IpAddress", "Counter32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Gauge32", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")

# OID tree: enterprises(1.3.6.1.4.1) -> tecElite(217) -> mworkse(17) -> am501(1)
tecElite = MibIdentifier((1, 3, 6, 1, 4, 1, 217))
mworkse = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17))
am501 = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1))
amMem = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1, 1))
amHeap = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1, 2))
# Read-only Counter32 scalars under amMem / amHeap.
amMemCeiling = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amMemCeiling.setStatus('mandatory')
amMemUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amMemUsed.setStatus('mandatory')
amHeapTotal = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amHeapTotal.setStatus('mandatory')
amHeapUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amHeapUsed.setStatus('mandatory')
# Register the module's symbols so other MIBs can import them by name.
mibBuilder.exportSymbols("MWORKS-E-MIB", amHeapTotal=amHeapTotal, mworkse=mworkse, amHeapUsed=amHeapUsed, am501=am501, amMem=amMem, amMemCeiling=amMemCeiling, amMemUsed=amMemUsed, tecElite=tecElite, amHeap=amHeap)
| 95
| 505
| 0.749624
|
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, Integer32, enterprises, Bits, Unsigned32, iso, Counter64, ModuleIdentity, IpAddress, Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Gauge32, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Integer32", "enterprises", "Bits", "Unsigned32", "iso", "Counter64", "ModuleIdentity", "IpAddress", "Counter32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Gauge32", "NotificationType")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
tecElite = MibIdentifier((1, 3, 6, 1, 4, 1, 217))
mworkse = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17))
am501 = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1))
amMem = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1, 1))
amHeap = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 17, 1, 2))
amMemCeiling = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amMemCeiling.setStatus('mandatory')
amMemUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amMemUsed.setStatus('mandatory')
amHeapTotal = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amHeapTotal.setStatus('mandatory')
amHeapUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 17, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: amHeapUsed.setStatus('mandatory')
mibBuilder.exportSymbols("MWORKS-E-MIB", amHeapTotal=amHeapTotal, mworkse=mworkse, amHeapUsed=amHeapUsed, am501=am501, amMem=amMem, amMemCeiling=amMemCeiling, amMemUsed=amMemUsed, tecElite=tecElite, amHeap=amHeap)
| true
| true
|
1c48263cc96b9da7e221dfd293fac9cea7534f3f
| 351
|
py
|
Python
|
apps/authentication/migrations/0018_merge.py
|
kharann/onlineweb4
|
1130128c6233b623780779a25934ea73ef62c264
|
[
"MIT"
] | null | null | null |
apps/authentication/migrations/0018_merge.py
|
kharann/onlineweb4
|
1130128c6233b623780779a25934ea73ef62c264
|
[
"MIT"
] | null | null | null |
apps/authentication/migrations/0018_merge.py
|
kharann/onlineweb4
|
1130128c6233b623780779a25934ea73ef62c264
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-30 19:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Empty merge migration joining two divergent branches of the
    ``authentication`` app's migration graph (0014 and 0017); it performs
    no schema changes itself.
    """
    dependencies = [
        ('authentication', '0014_auto_20151214_0117'),
        ('authentication', '0017_auto_20160128_1719'),
    ]
    operations = [
    ]
| 20.647059
| 54
| 0.675214
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('authentication', '0014_auto_20151214_0117'),
('authentication', '0017_auto_20160128_1719'),
]
operations = [
]
| true
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.