hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ad6d7da417555ea54e6ee28483afa72e65887941 | 2,188 | py | Python | userbot/modules/wall.py | Ajibcdefgh/ProjectYrzzr | 769844ecb57da00b8a43d5f872e4c3053da9cd02 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 6 | 2021-01-10T13:51:35.000Z | 2022-02-28T20:25:55.000Z | userbot/modules/wall.py | Ajibcdefgh/ProjectYrzzr | 769844ecb57da00b8a43d5f872e4c3053da9cd02 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 4 | 2021-12-19T22:45:50.000Z | 2021-12-19T22:45:50.000Z | userbot/modules/wall.py | Ajibcdefgh/ProjectYrzzr | 769844ecb57da00b8a43d5f872e4c3053da9cd02 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 68 | 2020-11-04T14:22:09.000Z | 2022-03-07T14:46:37.000Z | # Copyright (C) 2020 Alfiananda P.A
#
# Licensed under the General Public License, Version 3.0;
# you may not use this file except in compliance with the License.
#
import asyncio
import os
from asyncio.exceptions import TimeoutError
from telethon.errors.rpcerrorlist import YouBlockedUserError
from userbot import CMD_HELP, bot
from userbot.events import register
@register(outgoing=True, pattern=r"^\.wall(?: |$)(.*)")
async def _(event):
    """Fetch a wallpaper for the given query via the @userbotindobot bot.

    Sends `/wall <query>` to the bot, waits for its two replies (a preview
    image and the full-resolution file), re-posts both into the current chat,
    then cleans up the bot conversation and the downloaded files.
    """
    try:
        query = event.pattern_match.group(1)
        await event.edit("`Mencari wallpaper..`")
        async with bot.conversation("@userbotindobot") as conv:
            try:
                query1 = await conv.send_message(f"/wall {query}")
                # Give the bot a moment to reply. NOTE: the original called
                # asyncio.sleep(3) without awaiting it, so no delay happened.
                await asyncio.sleep(3)
                r1 = await conv.get_response()
                r2 = await conv.get_response()
                await bot.send_read_acknowledge(conv.chat_id)
            except YouBlockedUserError:
                return await event.reply("Unblock @userbotindobot plox")
        if r1.text.startswith("No"):
            return await event.edit(f"`Tidak ada hasil untuk` **{query}**")
        # r1 is the preview image, r2 the full-size wallpaper document.
        img = await event.client.download_media(r1)
        img2 = await event.client.download_media(r2)
        await event.edit("`Mengupload..`")
        p = await event.client.send_file(
            event.chat_id,
            img,
            force_document=False,
            caption="Preview",
            reply_to=event.reply_to_msg_id,
        )
        await event.client.send_file(
            event.chat_id,
            img2,
            force_document=True,
            caption=f"{query}",
            reply_to=p,
        )
        # Tidy up the bot conversation so it does not clutter the chat.
        await event.client.delete_messages(
            conv.chat_id, [r1.id, r2.id, query1.id]
        )
        await event.delete()
        # Remove only the files we downloaded; the old `os.system("rm *.png *.jpg")`
        # would delete unrelated images from the working directory.
        for path in (img, img2):
            if path and os.path.exists(path):
                os.remove(path)
    except TimeoutError:
        return await event.edit("`tidak merespon..`")
# Register this module's command with the bot-wide `.help` listing.
CMD_HELP.update(
    {"wallpaper": ">`.wall <query>`" "\nUsage: cari gambar wallpaper yang indah."}
)
| 34.730159 | 82 | 0.553931 |
a34baef7eef4d9300437b24419091554c232f1d2 | 7,419 | py | Python | lmctl/client/client.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 3 | 2021-07-19T09:46:01.000Z | 2022-03-07T13:51:25.000Z | lmctl/client/client.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 43 | 2019-08-27T12:36:29.000Z | 2020-08-27T14:50:40.000Z | lmctl/client/client.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 7 | 2020-09-22T20:32:17.000Z | 2022-03-29T12:25:51.000Z | from .api import *
from typing import Dict
from urllib.parse import urlparse, urlencode
from .exceptions import TNCOClientError, TNCOClientHttpError
from .auth_type import AuthType
from .auth_tracker import AuthTracker
from .error_capture import tnco_error_capture
from .client_test_result import TestResult, TestResults
from .client_request import TNCOClientRequest
from lmctl.utils.trace_ctx import trace_ctx
import requests
import logging
logger = logging.getLogger(__name__)
class TNCOClient:
    """
    Base client for TNCO.

    TNCO APIs are grouped by functional attribute; each group is exposed as a
    read-only property (e.g. `descriptors`, `assemblies`) returning an API
    helper bound to this client instance.
    """

    # HTTP method name constants (kept public for backwards compatibility).
    POST = 'post'
    GET = 'get'
    PUT = 'put'
    DELETE = 'delete'

    def __init__(self, address: str, auth_type: AuthType = None, kami_address: str = None, use_sessions: bool = False):
        """
        Args:
            address: base URL of the TNCO (CP4NA orchestration) instance;
                trailing slashes are stripped
            auth_type: strategy used to obtain access tokens; `None` means
                requests are made unauthenticated
            kami_address: optional address of the Kami extension API
            use_sessions: when True, a single `requests.Session` is re-used
                for all requests (connection pooling)
        """
        self.address = self._parse_address(address)
        self.auth_type = auth_type
        self.kami_address = kami_address
        # Token refresh state is only needed when authentication is configured.
        self.auth_tracker = AuthTracker() if self.auth_type is not None else None
        self._session = None
        self.use_sessions = use_sessions

    def _parse_address(self, address: str) -> str:
        """Strip trailing slashes so endpoints can be appended safely."""
        if address is not None:
            while address.endswith('/'):
                address = address[:-1]
        return address

    def close(self):
        """Close the pooled HTTP session, if one was created."""
        if self._session is not None:
            self._session.close()

    def _curr_session(self):
        # Lazily create (then re-use) a Session when pooling is enabled;
        # otherwise fall back to the module-level `requests` functions.
        if self.use_sessions:
            if self._session is None:
                self._session = requests.Session()
            return self._session
        else:
            return requests

    def get_access_token(self) -> str:
        """Return a valid access token, refreshing it first if expired.

        Returns `None` when the client was built without an auth type.
        """
        if self.auth_tracker is not None:
            if self.auth_tracker.has_access_expired:
                auth_response = self.auth_type.handle(self)
                self.auth_tracker.accept_auth_response(auth_response)
            return self.auth_tracker.current_access_token
        else:
            return None

    def _add_auth_headers(self, headers: Dict) -> Dict:
        """Inject a Bearer Authorization header when auth is configured."""
        if self.auth_tracker is not None:
            # Use the (possibly just refreshed) token returned by
            # get_access_token; the old code fetched it and then ignored it.
            access_token = self.get_access_token()
            headers['Authorization'] = f'Bearer {access_token}'
        return headers

    def _supplement_headers(self, headers: Dict, inject_current_auth: bool = True) -> Dict:
        """Add tracing headers (and, optionally, auth) to outgoing headers."""
        trace_ctx_headers = trace_ctx.to_http_header_dict()
        logger.debug(f'CP4NA orchestration request headers from trace ctx: {trace_ctx_headers}')
        headers.update(trace_ctx_headers)
        if inject_current_auth:
            self._add_auth_headers(headers=headers)
        return headers

    def make_request(self, request: TNCOClientRequest) -> requests.Response:
        """Execute a TNCOClientRequest and return the raw HTTP response.

        Raises:
            TNCOClientError: on connection-level failures.
            TNCOClientHttpError: when the server responds with an error status.
        """
        url = request.override_address if request.override_address else self.address
        if request.endpoint is not None:
            url = f'{url}/{request.endpoint}'
        request_kwargs = {}
        if request.query_params is not None and len(request.query_params) > 0:
            request_kwargs['params'] = request.query_params
        if request.body is not None:
            request_kwargs['data'] = request.body
        if request.files is not None and len(request.files) > 0:
            request_kwargs['files'] = request.files
        request_kwargs['headers'] = {}
        if request.headers is not None:
            request_kwargs['headers'].update(request.headers)
        # Log before adding sensitive data (auth tokens are injected below).
        logger.debug(f'CP4NA orchestration request: Method={request.method}, URL={url}, Request Kwargs={request_kwargs}')
        if request.additional_auth_handler is not None:
            request_kwargs['auth'] = request.additional_auth_handler
        self._supplement_headers(headers=request_kwargs['headers'], inject_current_auth=request.inject_current_auth)
        try:
            # NOTE(security): TLS verification is disabled (verify=False);
            # acceptable for self-signed lab installs, worth revisiting.
            response = self._curr_session().request(method=request.method, url=url, verify=False, **request_kwargs)
        except requests.RequestException as e:
            raise TNCOClientError(str(e)) from e
        logger.debug(f'CP4NA orchestration request has returned: Method={request.method}, URL={url}, Response={response}')
        try:
            response.raise_for_status()
        except requests.HTTPError as e:
            raise TNCOClientHttpError(f'{request.method} request to {url} failed', e) from e
        return response

    def make_request_for_json(self, request: TNCOClientRequest) -> Dict:
        """Execute a request and parse the response body as JSON."""
        response = self.make_request(request)
        try:
            return response.json()
        except ValueError as e:
            raise TNCOClientError(f'Failed to parse response to JSON: {str(e)}') from e

    def ping(self, include_template_engine: bool = False) -> TestResults:
        """Smoke-test connectivity to each functional group of APIs.

        Fix: annotated return type is TestResults (the old `-> Dict` was wrong).

        Args:
            include_template_engine: also probe the descriptor template engine.

        Returns:
            TestResults summarising which API groups responded without error.
        """
        with tnco_error_capture() as A:
            self.descriptors.all()
        with tnco_error_capture() as B:
            self.deployment_locations.all()
        with tnco_error_capture() as C:
            self.behaviour_projects.all()
        with tnco_error_capture() as D:
            self.shared_inf_keys.all()
        tests = []
        tests.append(TestResult(name='Descriptors', error=A.error))
        tests.append(TestResult(name='Topology', error=B.error))
        tests.append(TestResult(name='Behaviour', error=C.error))
        tests.append(TestResult(name='Resource Manager', error=D.error))
        if include_template_engine:
            with tnco_error_capture() as templates:
                self.descriptor_templates.all()
            tests.append(TestResult(name='Template Engine', error=templates.error))
        return TestResults(tests=tests)

    # --- Functional API groups ---------------------------------------------

    @property
    def auth(self) -> AuthenticationAPI:
        return AuthenticationAPI(self)

    @property
    def assemblies(self) -> AssembliesAPI:
        return AssembliesAPI(self)

    @property
    def behaviour_assembly_confs(self) -> BehaviourAssemblyConfigurationsAPI:
        return BehaviourAssemblyConfigurationsAPI(self)

    @property
    def behaviour_projects(self) -> BehaviourProjectsAPI:
        return BehaviourProjectsAPI(self)

    @property
    def behaviour_scenarios(self) -> BehaviourScenariosAPI:
        return BehaviourScenariosAPI(self)

    @property
    def behaviour_scenario_execs(self) -> BehaviourScenarioExecutionsAPI:
        return BehaviourScenarioExecutionsAPI(self)

    @property
    def deployment_locations(self) -> DeploymentLocationAPI:
        return DeploymentLocationAPI(self)

    @property
    def descriptors(self) -> DescriptorsAPI:
        return DescriptorsAPI(self)

    @property
    def descriptor_templates(self) -> DescriptorTemplatesAPI:
        return DescriptorTemplatesAPI(self)

    @property
    def lifecycle_drivers(self) -> LifecycleDriversAPI:
        return LifecycleDriversAPI(self)

    @property
    def processes(self) -> ProcessesAPI:
        return ProcessesAPI(self)

    @property
    def resource_drivers(self) -> ResourceDriversAPI:
        return ResourceDriversAPI(self)

    @property
    def resource_packages(self) -> ResourcePackagesAPI:
        return ResourcePackagesAPI(self)

    @property
    def resource_managers(self) -> ResourceManagersAPI:
        return ResourceManagersAPI(self)

    @property
    def shared_inf_keys(self) -> SharedInfrastructureKeysAPI:
        return SharedInfrastructureKeysAPI(self)

    @property
    def vim_drivers(self) -> VIMDriversAPI:
        return VIMDriversAPI(self)
| 36.367647 | 122 | 0.672193 |
1d4073cf594565707c45cf6e1a2e921c9d032364 | 12,970 | py | Python | server/views/user.py | Yunicorn228/web-tools | 056d2d8310f3096c8be90638342bb3cc5715a89f | [
"Apache-2.0"
] | 1 | 2021-07-18T13:08:09.000Z | 2021-07-18T13:08:09.000Z | server/views/user.py | Yunicorn228/web-tools | 056d2d8310f3096c8be90638342bb3cc5715a89f | [
"Apache-2.0"
] | null | null | null | server/views/user.py | Yunicorn228/web-tools | 056d2d8310f3096c8be90638342bb3cc5715a89f | [
"Apache-2.0"
] | null | null | null | import logging
from flask import jsonify, request, redirect, send_file
import flask_login
from mediacloud.error import MCException
import tempfile
import json
import os
import csv
import io
import zipfile
from server import app, auth, mc, user_db
from server.auth import user_mediacloud_client, user_name, user_is_admin
from server.util.request import api_error_handler, form_fields_required, arguments_required, json_error_response
from server.views.topics.topiclist import topics_user_can_access
logger = logging.getLogger(__name__)
# All auth-management links embedded in outgoing emails point at the tools
# site, because it is too hard to tell which site you are on.
AUTH_MANAGEMENT_DOMAIN = 'https://tools.mediacloud.org'
# Backend-sent emails route users back through these endpoints.
ACTIVATION_URL = AUTH_MANAGEMENT_DOMAIN + "/api/user/activate/confirm"
PASSWORD_RESET_URL = AUTH_MANAGEMENT_DOMAIN + "/api/user/reset-password-request-receive"
def _create_user_session(user_results):
    """Turn a backend auth/profile response into a logged-in user object.

    Accepts either a raw response dict or an object exposing `get_properties()`.
    Returns the created user, or a 401 JSON error response when the backend
    reported an error.
    """
    if not isinstance(user_results, dict):
        user_results = user_results.get_properties()
    # Fix: bail out on a backend-reported error *before* dereferencing
    # 'profile', which may be absent from an error response (the old code
    # checked for the error only after touching the profile).
    if 'error' in user_results:
        return json_error_response(user_results['error'], 401)
    # HACK: the API used to return this as true/false, but now returns it as 1 or 0,
    # so we change it to boolean here so we don't have to change front-end JS logic
    user_results['profile']['has_consented'] = (user_results['profile']['has_consented'] == 1) or \
                                               (user_results['profile']['has_consented'] is True)
    merged_user_info = user_results['profile'].copy()
    return auth.create_user(merged_user_info)
@app.route('/api/login', methods=['POST'])
@form_fields_required('email', 'password')
@api_error_handler
def login_with_password():
    """Log a user in with email/password and start a server-side session."""
    email = request.form["email"]
    logger.debug("login request from %s", email)
    # Authenticate against the backend; a bad password raises an MCException
    # which the api_error_handler decorator turns into an error response.
    auth_results = mc.authLogin(email, request.form["password"])
    session_user = _create_user_session(auth_results)
    logger.debug(" succeeded - got a key (user.is_anonymous=%s)", session_user.is_anonymous)
    auth.login_user(session_user)
    return jsonify(session_user.get_properties())
@app.route('/api/login-with-cookie')
@api_error_handler
def login_with_cookie():
    """Re-establish a session from an existing flask-login cookie."""
    cached_user = flask_login.current_user
    if not cached_user.is_anonymous:
        refreshed = _create_user_session(cached_user)
        return jsonify(refreshed.get_properties())
    # No user session is attached to this cookie.
    logger.debug(" login failed (%s)", cached_user.is_anonymous)
    return json_error_response("Login failed", 401)
@app.route('/api/user/signup', methods=['POST'])
@form_fields_required('email', 'password', 'fullName', 'notes', 'has_consented')
@api_error_handler
def signup():
    """Register a new user account and trigger an activation email."""
    logger.debug("reg request from %s", request.form['email'])
    # The form sends booleans as strings; anything other than 'true'
    # (including a missing field) counts as "did not consent". The old
    # `bool(... == 'true')` wrapper was redundant: `==` already yields a bool.
    has_consented = request.form.get('has_consented') == 'true'
    results = mc.authRegister(request.form['email'],
                              request.form['password'],
                              request.form['fullName'],
                              request.form['notes'],
                              ACTIVATION_URL,
                              has_consented,
                              )
    return jsonify(results)
@app.route('/api/user/activate/confirm', methods=['GET'])
@arguments_required('email', 'activation_token')
def activation_confirm():
    """Handle the activation link from the signup email, then bounce to the UI."""
    logger.debug("activation request from %s", request.args['email'])
    try:
        results = mc.authActivate(request.args['email'], request.args['activation_token'])
        if results['success'] == 1:
            target = AUTH_MANAGEMENT_DOMAIN + '/#/user/activated?success=1'
        else:
            target = (AUTH_MANAGEMENT_DOMAIN + '/#/user/activated?success=0&msg=' +
                      results['error'])
    except MCException as mce:
        # the stack trace is long, so trim it to keep the URL a supportable length
        target = AUTH_MANAGEMENT_DOMAIN + '/#/user/activated?success=0&msg=' + str(mce)[:300]
    return redirect(target)
@app.route('/api/user/activation/resend', methods=['POST'])
@form_fields_required('email')
@api_error_handler
def activation_resend():
    """Re-send the account-activation email to the given address."""
    address = request.form['email']
    logger.debug("activation request from %s", address)
    return jsonify(mc.authResendActivationLink(address, ACTIVATION_URL))
@app.route('/api/user/reset-password-request', methods=['POST'])
@form_fields_required('email')
@api_error_handler
def request_password_reset():
    """Ask the backend to email the user a password-reset link."""
    address = request.form["email"]
    logger.debug("request password reset from %s", address)
    return jsonify(mc.authSendPasswordResetLink(address, PASSWORD_RESET_URL))
# crazy redirect workaround because the backend isn't handling the #-based URL part we want to use
@app.route('/api/user/reset-password-request-receive', methods=['GET'])
@arguments_required('email', 'password_reset_token')
@api_error_handler
def request_password_reset_receive():
    """Forward the backend's reset link into the front-end's #-routed page."""
    target = AUTH_MANAGEMENT_DOMAIN + '/#/user/reset-password?email={}&password_reset_token={}'.format(
        request.args['email'], request.args['password_reset_token'])
    return redirect(target)
@app.route('/api/user/reset-password', methods=['POST'])
@form_fields_required('email', 'password_reset_token', 'new_password')
@api_error_handler
def reset_password():
    """Complete a password reset using the token from the reset email."""
    form = request.form
    logger.debug("reset password for %s", form['email'])
    results = mc.authResetPassword(form["email"],
                                   form['password_reset_token'],
                                   form['new_password'])
    return jsonify(results)
@app.route('/api/user/change-password', methods=['POST'])
@form_fields_required('old_password', 'new_password')
@flask_login.login_required
@api_error_handler
def change_password():
    """Change the logged-in user's password, mapping backend errors to messages."""
    user_mc = user_mediacloud_client()
    try:
        results = user_mc.authChangePassword(request.form['old_password'],
                                             request.form['new_password'])
    except MCException as e:
        logger.exception(e)
        if 'Unable to change password' in e.message:
            if 'Old password is incorrect' in e.message or 'Unable to log in with old password' in e.message:
                return json_error_response('Unable to change password - old password is incorrect')
            if 'not found or password is incorrect' in e.message:
                return json_error_response('Unable to change password - user not found or password is incorrect')
        # Fix: the old code fell through to `return jsonify({})` here, which
        # looked like success to the caller even though the change failed.
        return json_error_response('Unable to change password - see log for more details')
    return jsonify(results)
@app.route('/api/user/reset-api-key', methods=['POST'])
@flask_login.login_required
@api_error_handler
def reset_api_key():
    """Generate a fresh API key for the current user."""
    results = user_mediacloud_client().authResetApiKey()
    # keep the server-side session's api key in sync with the new one
    flask_login.current_user.update_profile(results['profile'])
    return jsonify(results)
@app.route('/api/user/logout')
def logout():
    # End the flask-login session, then send the user back to the home page.
    flask_login.logout_user()
    return redirect("/")
@app.route('/api/user/delete', methods=['POST'])
@form_fields_required('email')
@api_error_handler
@flask_login.login_required
def api_user_delete():
    """Delete the logged-in user's account after an email-confirmation check.

    The user must re-type their email address as confirmation. On success they
    are logged out; on failure a 400 JSON error is returned.
    """
    email = request.form['email']
    user = flask_login.current_user
    # Guard clause: the typed confirmation must match the account's email.
    if email != user.name:
        return json_error_response("Your email confirmation didn't match.", 400)
    # delete them from the front-end system database
    user_db.delete_user(user.name)
    try:
        # Fix: the backend delete call now lives inside the try, so an
        # MCException from it is handled instead of escaping as a 500.
        # Needs to be done with the tool's admin account.
        results = mc.userDelete(user.profile['auth_users_id'])
        if ('success' in results) and (results['success'] == 1):
            return logout()
        return json_error_response("We failed to delete your account, sorry!", 400)
    except MCException as mce:
        logger.exception(mce)
        return json_error_response("We failed to delete your account, sorry!", 400)
@app.route('/api/user/update', methods=['POST'])
@form_fields_required('full_name', 'notes', 'has_consented')
@api_error_handler
@flask_login.login_required
def api_user_update():
    """Update the logged-in user's profile and refresh their session data."""
    # NOTE(review): the form value arrives as a string (e.g. 'true'/'false'/'null');
    # only the literal 'null' is normalized to False here, so the string is
    # presumably passed through to the backend as-is -- TODO confirm the
    # backend tolerates string booleans.
    has_consented = request.form['has_consented'] if 'has_consented' in request.form else False
    if has_consented == 'null':
        has_consented = False
    valid_params = {
        'full_name': request.form['full_name'],
        'notes': request.form['notes'],
        'has_consented': has_consented
    }
    cached_user = flask_login.current_user
    # need to update user with the tool admin client, because user doesn't have permission to do this themselves
    mc.userUpdate(cached_user.profile['auth_users_id'], **valid_params)
    # Re-read the profile as the user and rebuild their session so the app
    # reflects the changes immediately.
    user_mc = user_mediacloud_client()
    updated_user = user_mc.userProfile()
    cached_user.profile = updated_user
    user = _create_user_session(cached_user)
    return jsonify(user.get_properties())
@app.route('/api/user/download-data')
@api_error_handler
@flask_login.login_required
def api_user_data_download():
    """Export everything we store about the current user as a zip download."""
    user_mc = user_mediacloud_client()
    export_dir = _save_user_data_dir(flask_login.current_user, user_mc)
    # zip it in memory to be extra safe on security
    archive = _zip_in_memory(export_dir)
    return send_file(archive, mimetype='application/zip', as_attachment=True,
                     attachment_filename='mediacloud-data.zip')
def _zip_in_memory(dir_to_zip):
# remember our home dir
old_path = os.getcwd()
os.chdir(dir_to_zip)
# send
data = io.BytesIO()
with zipfile.ZipFile(data, mode='w') as z:
for f_name in os.listdir("."): # doing the whole path switch to make sure the zip folder structure is right
z.write(f_name)
os.unlink(f_name)
data.seek(0) # to make sure the file starts at teh begging again, *not* where the zip commands left it
# put us back in the home dir
os.chdir(old_path)
os.rmdir(dir_to_zip)
return data
def _save_user_data_dir(u, user_mc):
    """Write all of a user's exportable data into a new temp directory.

    Creates one file per category (profile, topic permissions, saved searches,
    starred sources/collections/topics) and returns the directory path so the
    caller can zip it up and delete it.

    Args:
        u: the logged-in user object (provides `profile` and `name`).
        user_mc: a media cloud API client authorized as that user.

    Returns:
        Path to the newly-created temp directory.
    """
    # make a dir first (prefix with user_id for extra security)
    temp_dir = tempfile.mkdtemp(prefix='user{}'.format(u.profile['auth_users_id']))
    # user profile
    with open(os.path.join(temp_dir, 'profile.json'), 'w') as outfile:
        profile = u.profile
        json.dump(profile, outfile)
    # topic-level permissions
    with open(os.path.join(temp_dir, 'topic-permissions.csv'), 'w') as outfile:
        topics = user_mc.topicList(limit=1000)['topics']
        user_owned_topics = topics_user_can_access(topics, u.profile['email'], user_is_admin())
        topic_permission_list = [{
            'topics_id': t['topics_id'],
            'topic_name': t['name'],
            'permission': t['user_permission'],
        } for t in user_owned_topics]
        writer = csv.DictWriter(outfile, ['topics_id', 'topic_name', 'permission'])
        writer.writeheader()
        writer.writerows(topic_permission_list)
    # saved searches
    # NOTE(review): this section keys on `u.name` while the starred-* sections
    # below key on `user_name()`; presumably these resolve to the same user,
    # but verify before relying on it.
    with open(os.path.join(temp_dir, 'saved-searches.json'), 'w') as outfile:
        search_list = user_db.get_users_lists(u.name, 'searches')
        json.dump(search_list, outfile)
    # starred sources
    with open(os.path.join(temp_dir, 'starred-sources.csv'), 'w') as outfile:
        user_favorited = user_db.get_users_lists(user_name(), 'favoriteSources')
        media_sources = [user_mc.media(media_id) for media_id in user_favorited]
        media_sources = [{
            'media_id': m['media_id'],
            'name': m['name'],
            'url': m['url']
        } for m in media_sources]
        writer = csv.DictWriter(outfile, ['media_id', 'name', 'url'])
        writer.writeheader()
        writer.writerows(media_sources)
    # starred collections
    with open(os.path.join(temp_dir, 'starred-collections.csv'), 'w') as outfile:
        user_favorited = user_db.get_users_lists(user_name(), 'favoriteCollections')
        collections = [user_mc.tag(tags_id) for tags_id in user_favorited]
        collections = [{
            'tags_id': c['tags_id'],
            'label': c['label'],
            'description': c['description']
        } for c in collections]
        writer = csv.DictWriter(outfile, ['tags_id', 'label', 'description'])
        writer.writeheader()
        writer.writerows(collections)
    # starred topics
    with open(os.path.join(temp_dir, 'starred-topics.csv'), 'w') as outfile:
        user_favorited = user_db.get_users_lists(user_name(), 'favoriteTopics')
        topics = [user_mc.topic(topics_id) for topics_id in user_favorited]
        topics = [{
            'topics_id': t['topics_id'],
            'name': t['name'],
            'description': t['description']
        } for t in topics]
        writer = csv.DictWriter(outfile, ['topics_id', 'name', 'description'])
        writer.writeheader()
        writer.writerows(topics)
    return temp_dir
| 41.70418 | 122 | 0.679029 |
25ef2c54dfa2392791f90e94ac69d4d2b872a092 | 1,969 | py | Python | data/compute_budget.py | minorchange/co2_monitor_hd | 9d91d3dcc8363c547a9040d9ea2c97d05cc82502 | [
"MIT"
] | null | null | null | data/compute_budget.py | minorchange/co2_monitor_hd | 9d91d3dcc8363c547a9040d9ea2c97d05cc82502 | [
"MIT"
] | null | null | null | data/compute_budget.py | minorchange/co2_monitor_hd | 9d91d3dcc8363c547a9040d9ea2c97d05cc82502 | [
"MIT"
] | null | null | null | import os, sys
import datetime
from dateutil.relativedelta import relativedelta
from data.read_data import read_budget
def get_remaining_paris_budget(df):
    """Estimate the remaining Paris-agreement CO2 budget and its depletion date.

    Sums the measured emissions since the budget's start year, extrapolates
    from the end of the last fully-measured year to "now" at the latest known
    annual emission rate, and projects when the budget runs out at that rate.

    Args:
        df: DataFrame with a 'co2_kt_total' column indexed by year.

    Returns:
        Tuple `(remaining_budget_kt, when_budget_is_depleted)` where the
        second element is a datetime of the projected depletion moment.

    Raises:
        ValueError: if the required column is missing or the latest annual
            emissions fall outside the plausible range.
    """
    # Fix: validate with real exceptions instead of `assert`, which is
    # silently stripped when Python runs with -O.
    if "co2_kt_total" not in df.columns:
        raise ValueError("df must contain a 'co2_kt_total' column")

    budget_start_year, budget_start_value_kt = read_budget()

    s_total = df["co2_kt_total"].dropna()
    s_measured_since_budgetstart = s_total[s_total.index >= budget_start_year]
    measured_co2kt_since_budgetstart = s_measured_since_budgetstart.sum()

    last_measured_year = s_total.index.max()
    latest_emissions_ktperyear = s_total[last_measured_year]
    # Plausibility bounds carried over from the original asserts.
    if not -1 < latest_emissions_ktperyear < 3000:
        raise ValueError(
            f"implausible latest annual emissions: {latest_emissions_ktperyear} kt"
        )

    seconds_per_year = 60 * 60 * 24 * 365.25
    latest_emissions_ktpersecond = latest_emissions_ktperyear / seconds_per_year

    # Extrapolate from the end of the last measured year, but never from
    # before the budget period started.
    last_measured_year_since_before_budgetstart = max(
        last_measured_year, budget_start_year - 1
    )
    # Build the timestamp directly rather than round-tripping through strptime.
    last_measured_second = datetime.datetime(
        int(last_measured_year_since_before_budgetstart), 12, 31, 23, 59, 59
    )
    now = datetime.datetime.now()
    seconds_since_last_measured_second = (now - last_measured_second).total_seconds()
    estimated_emissions_kt = (
        latest_emissions_ktpersecond * seconds_since_last_measured_second
    )

    total_emissions_kt = measured_co2kt_since_budgetstart + estimated_emissions_kt
    remaining_budget_kt = budget_start_value_kt - total_emissions_kt

    # At the current burn rate, when does the remaining budget hit zero?
    remaining_seconds = remaining_budget_kt / latest_emissions_ktpersecond
    when_budget_is_depleted = now + relativedelta(seconds=+remaining_seconds)

    return remaining_budget_kt, when_budget_is_depleted
if __name__ == "__main__":
from read_data import read_emissions
df_emissions = read_emissions()
remaining_budget_kt, when_budget_is_depleted = get_remaining_paris_budget(
df_emissions, trend
)
print(total_emissions_kt)
print(remaining_budget_kt)
print(remaining_budget_kt.year)
| 32.278689 | 85 | 0.771965 |
dbbf7db9d3f992062ccce7c4d9e5a13cadec63c6 | 15,522 | py | Python | tensorflow_probability/python/distributions/deterministic.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/deterministic.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/deterministic.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The Deterministic distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
# Dependency imports
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
from tensorflow_probability.python.internal import tensorshape_util
# Names exported when this module is star-imported.
__all__ = [
    "Deterministic",
    "VectorDeterministic",
]
@six.add_metaclass(abc.ABCMeta)
class _BaseDeterministic(distribution.Distribution):
  """Base class for Deterministic distributions.

  A Deterministic distribution is a point mass at `loc`; machinery shared by
  the scalar (`Deterministic`) and vector (`VectorDeterministic`) variants
  lives here.
  """

  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               is_vector=False,
               validate_args=False,
               allow_nan_stats=True,
               parameters=None,
               name="_BaseDeterministic"):
    """Initialize a batch of `_BaseDeterministic` distributions.

    The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
    computations, e.g. due to floating-point error.

    ```
    pmf(x; loc)
      = 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
      = 0, otherwise.
    ```

    Args:
      loc: Numeric `Tensor`. The point (or batch of points) on which this
        distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      is_vector: Python `bool`. If `True`, this is for `VectorDeterministic`,
        else `Deterministic`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      parameters: Dict of locals to facilitate copy construction.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      ValueError: If `loc` is a scalar.
    """
    with tf.name_scope(name) as name:
      dtype = dtype_util.common_dtype([loc, atol, rtol], dtype_hint=tf.float32)
      # atol/rtol default to exact (0) tolerance when not provided.
      self._loc = tensor_util.convert_immutable_to_tensor(
          loc, dtype_hint=dtype, name="loc")
      self._atol = tensor_util.convert_immutable_to_tensor(
          0 if atol is None else atol, dtype=dtype, name="atol")
      self._rtol = tensor_util.convert_immutable_to_tensor(
          0 if rtol is None else rtol, dtype=dtype, name="rtol")
      self._is_vector = is_vector
      # Samples of a point mass are differentiable w.r.t. loc for floating
      # dtypes, hence fully reparameterized in that case.
      super(_BaseDeterministic, self).__init__(
          dtype=self._loc.dtype,
          reparameterization_type=(
              reparameterization.FULLY_REPARAMETERIZED
              if self._loc.dtype.is_floating
              else reparameterization.NOT_REPARAMETERIZED),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          name=name)

  def _slack(self, loc):
    """Tolerance band around `loc`: atol (+ rtol * |loc| when rtol was given)."""
    # Avoid using the large broadcast with self.loc if possible.
    if self.parameters["rtol"] is None:
      return self.atol
    else:
      return self.atol + self.rtol * tf.abs(loc)

  @property
  def loc(self):
    """Point (or batch of points) at which this distribution is supported."""
    return self._loc

  @property
  def atol(self):
    """Absolute tolerance for comparing points to `self.loc`."""
    return self._atol

  @property
  def rtol(self):
    """Relative tolerance for comparing points to `self.loc`."""
    return self._rtol

  def _entropy(self):
    # A point mass carries no uncertainty.
    return tf.zeros(self.batch_shape_tensor(), dtype=self.dtype)

  def _mean(self):
    return tf.identity(self.loc)

  def _variance(self):
    return tf.zeros_like(self.loc)

  def _mode(self):
    return self.mean()

  def _sample_n(self, n, seed=None):
    del seed  # unused
    # Every sample is a copy of loc, broadcast to [n] + batch + event shape.
    loc = tf.convert_to_tensor(self.loc)
    return tf.broadcast_to(
        loc,
        tf.concat([[n], self._batch_shape_tensor(loc=loc),
                   self._event_shape_tensor(loc=loc)],
                  axis=0))

  def _parameter_control_dependencies(self, is_init):
    """Build validation assertions for loc/atol/rtol (Distribution hook)."""
    assertions = []
    # In init, we can always build shape and dtype checks because
    # we assume shape doesn't change for Variable backed args.
    if is_init and self._is_vector:
      msg = "Argument `loc` must be at least rank 1."
      if tensorshape_util.rank(self.loc.shape) is not None:
        if tensorshape_util.rank(self.loc.shape) < 1:
          raise ValueError(msg)
      elif self.validate_args:
        assertions.append(
            assert_util.assert_rank_at_least(self.loc, 1, message=msg))
    if not self.validate_args:
      assert not assertions  # Should never happen
      return []
    # Only re-check a tolerance when it could have changed: at init time for
    # plain tensors, at call time for mutable (Variable-backed) args.
    if is_init != tensor_util.is_mutable(self.atol):
      assertions.append(
          assert_util.assert_non_negative(
              self.atol, message="Argument 'atol' must be non-negative"))
    if is_init != tensor_util.is_mutable(self.rtol):
      assertions.append(
          assert_util.assert_non_negative(
              self.rtol, message="Argument 'rtol' must be non-negative"))
    return assertions
class Deterministic(_BaseDeterministic):
  """Scalar `Deterministic` distribution on the real line.
  The scalar `Deterministic` distribution is parameterized by a [batch] point
  `loc` on the real line. The distribution is supported at this point only,
  and corresponds to a random variable that is constant, equal to `loc`.
  See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).
  #### Mathematical Details
  The probability mass function (pmf) and cumulative distribution function (cdf)
  are
  ```none
  pmf(x; loc) = 1, if x == loc, else 0
  cdf(x; loc) = 1, if x >= loc, else 0
  ```
  #### Examples
  ```python
  # Initialize a single Deterministic supported at zero.
  constant = tfp.distributions.Deterministic(0.)
  constant.prob(0.)
  ==> 1.
  constant.prob(2.)
  ==> 0.
  # Initialize a [2, 2] batch of scalar constants.
  loc = [[0., 1.], [2., 3.]]
  x = [[0., 1.1], [1.99, 3.]]
  constant = tfp.distributions.Deterministic(loc)
  constant.prob(x)
  ==> [[1., 0.], [0., 1.]]
  ```
  """
  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name="Deterministic"):
    """Initialize a scalar `Deterministic` distribution.
    The `atol` and `rtol` parameters allow for some slack in `pmf`, `cdf`
    computations, e.g. due to floating-point error.
    ```
    pmf(x; loc)
    = 1, if Abs(x - loc) <= atol + rtol * Abs(loc),
    = 0, otherwise.
    ```
    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb]`, with `b >= 0`.
        The point (or batch of points) on which this distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(Deterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)
  @classmethod
  def _params_event_ndims(cls):
    # Scalar distribution: every parameter contributes 0 event dims.
    return dict(loc=0, atol=0, rtol=0)
  def _batch_shape_tensor(self, loc=None):
    # Batch shape is the broadcast of `loc` with both tolerances.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol), tf.shape(self.rtol)))
  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))
  def _event_shape_tensor(self, loc=None):
    del loc
    # Scalar distribution: empty event shape.
    return tf.constant([], dtype=tf.int32)
  def _event_shape(self):
    return tf.TensorShape([])
  def _prob(self, x):
    loc = tf.convert_to_tensor(self.loc)
    # Enforces dtype of probability to be float, when self.dtype is not.
    prob_dtype = self.dtype if self.dtype.is_floating else tf.float32
    # pmf is 1 within the atol/rtol slack band around loc, else 0.
    return tf.cast(tf.abs(x - loc) <= self._slack(loc), dtype=prob_dtype)
  def _cdf(self, x):
    loc = tf.identity(self.loc)
    # Step function: 1 for x >= loc (minus slack), else 0.
    return tf.cast(x >= loc - self._slack(loc), dtype=self.dtype)
class VectorDeterministic(_BaseDeterministic):
  """Vector `Deterministic` distribution on `R^k`.
  The `VectorDeterministic` distribution is parameterized by a [batch] point
  `loc in R^k`. The distribution is supported at this point only,
  and corresponds to a random variable that is constant, equal to `loc`.
  See [Degenerate rv](https://en.wikipedia.org/wiki/Degenerate_distribution).
  #### Mathematical Details
  The probability mass function (pmf) is
  ```none
  pmf(x; loc)
  = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
  = 0, otherwise.
  ```
  #### Examples
  ```python
  tfd = tfp.distributions
  # Initialize a single VectorDeterministic supported at [0., 2.] in R^2.
  constant = tfd.Deterministic([0., 2.])
  constant.prob([0., 2.])
  ==> 1.
  constant.prob([0., 3.])
  ==> 0.
  # Initialize a [3] batch of constants on R^2.
  loc = [[0., 1.], [2., 3.], [4., 5.]]
  constant = tfd.VectorDeterministic(loc)
  constant.prob([[0., 1.], [1.9, 3.], [3.99, 5.]])
  ==> [1., 0., 0.]
  ```
  """
  def __init__(self,
               loc,
               atol=None,
               rtol=None,
               validate_args=False,
               allow_nan_stats=True,
               name="VectorDeterministic"):
    """Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.
    Note that there is only one point in `R^0`, the "point" `[]`. So if `k = 0`
    then `self.prob([]) == 1`.
    The `atol` and `rtol` parameters allow for some slack in `pmf`
    computations, e.g. due to floating-point error.
    ```
    pmf(x; loc)
    = 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
    = 0, otherwise
    ```
    Args:
      loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`, `k >= 0`
        The point (or batch of points) on which this distribution is supported.
      atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The absolute tolerance for comparing closeness to `loc`.
        Default is `0`.
      rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
        shape. The relative tolerance for comparing closeness to `loc`.
        Default is `0`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    super(VectorDeterministic, self).__init__(
        loc,
        atol=atol,
        rtol=rtol,
        is_vector=True,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        name=name)
  @classmethod
  def _params_event_ndims(cls):
    # Vector distribution: every parameter carries 1 event dim (the k axis).
    return dict(loc=1, atol=1, rtol=1)
  def _batch_shape_tensor(self, loc=None):
    # Broadcast loc with the tolerances, then drop the trailing event dim.
    return tf.broadcast_dynamic_shape(
        tf.shape(self.loc if loc is None else loc),
        tf.broadcast_dynamic_shape(tf.shape(self.atol),
                                   tf.shape(self.rtol)))[:-1]
  def _batch_shape(self):
    return tf.broadcast_static_shape(
        self.loc.shape,
        tf.broadcast_static_shape(self.atol.shape, self.rtol.shape))[:-1]
  def _event_shape_tensor(self, loc=None):
    # Event shape is the size k of loc's last axis.
    return tf.shape(self.loc if loc is None else loc)[-1:]
  def _event_shape(self):
    return self.loc.shape[-1:]
  def _prob(self, x):
    if self.validate_args:
      # x must be at least a vector and live in the same R^k as loc.
      is_vector_check = assert_util.assert_rank_at_least(x, 1)
      right_vec_space_check = assert_util.assert_equal(
          self.event_shape_tensor(),
          tf.gather(tf.shape(x),
                    tf.rank(x) - 1),
          message="Argument 'x' not defined in the same space R^k as this distribution"
      )
      with tf.control_dependencies([is_vector_check]):
        with tf.control_dependencies([right_vec_space_check]):
          x = tf.identity(x)
    loc = tf.convert_to_tensor(self.loc)
    # pmf is 1 only if every coordinate is within the slack band.
    return tf.cast(
        tf.reduce_all(tf.abs(x - loc) <= self._slack(loc), axis=-1),
        dtype=self.dtype)
@kullback_leibler.RegisterKL(_BaseDeterministic, distribution.Distribution)
def _kl_deterministic_distribution(a, b, name=None):
  """Calculate the batched KL divergence `KL(a || b)` with `a` Deterministic.

  Args:
    a: instance of a Deterministic distribution object.
    b: instance of a Distribution distribution object.
    name: (optional) Name to use for created operations. Default is
      "kl_deterministic_distribution".

  Returns:
    Batchwise `KL(a || b)`.
  """
  scope_name = name or "kl_deterministic_distribution"
  with tf.name_scope(scope_name):
    # `a` is a point mass at `loc`, so E_a[log a(x) - log b(x)] collapses
    # to -log b(loc) (the log a term contributes 0).
    return -b.log_prob(a.loc)
| 34.802691 | 87 | 0.654555 |
e517d194b897be4daedda09aa4941aa79555c35a | 10,125 | py | Python | tests/test_validators_br.py | silexsistemas/validator-collection-br | 5a6940a07c435e1f9f6c371d9c1d4187b0ada5d0 | [
"MIT"
] | 2 | 2020-10-07T19:14:15.000Z | 2020-12-06T23:47:13.000Z | tests/test_validators_br.py | silexsistemas/validator-collection-br | 5a6940a07c435e1f9f6c371d9c1d4187b0ada5d0 | [
"MIT"
] | null | null | null | tests/test_validators_br.py | silexsistemas/validator-collection-br | 5a6940a07c435e1f9f6c371d9c1d4187b0ada5d0 | [
"MIT"
] | 1 | 2020-06-29T13:02:22.000Z | 2020-06-29T13:02:22.000Z | import pytest
from validator_collection_br import validators_br, errors_br
from validator_collection import errors
# malformed_lawsuits_numbers = ['15201201038073', '19890010109053', '19970011037802', '1020857820138200000', '10514093820138200000', '0056758-98.2017', '0062436-28.2016.8.19.000', '0078346-55.2003''0078821-61.2013', '0079413-74.2011', '008.09.2099979', '0083734-55.2011.8.19.000', '0084047-93.2017.8.05.000', '009.08.603715-1', '011.10.016102-3', '0114968-57.2013.8.20.0', '0114968-57.2013.8.20.000', '0120646-89.2007', '0123367-53.2017.8.05.000', '0123526-78.2012.8.26.010', '0124351-27.2009', '0126620-05.2010.8.26.010', '0134621-91.2015.8.05.000', '0137755-29.2015.8.05.000', '013958-23.2012', '013958-23.2012', '0146570-29.2012', '015.01..000150-2', '0151068-04.2014.8.19.000', '016.10.611108-1', '0163240-79.2011.8.26.010', '0164068-51.2006', '0172804482012/01', '0174361-32.2010.8.05.000', '0187146-35.2010', '0188528-29.2011', '01993355-70.2009.8.26.010', '0201338-57.2013.8.19.000', '0203102-23.2012.8.26.010', '0208229-10.2010', '0319066-26.2016', '032.2011.022.137-4', '0350717-52.2011', '0422056-32.2015', '0533540-62.2006', '0603715-96-2008', '0607852-95.2011', '0610207.44.2012', '0624529-50.2008', '0706581-07.2017.8.07.', '0713 13005841-3', '0713942-11.2007', '07176-54-06.2016.8.14.03', '0717890-86/2014', '0718855-35.2012', '0800109-22.2016.8.20.500', '0800190-33.2009', '0800265-77.2013.8.20.000', '10.110.805.320.148.260.00', '100.10.610064-', '1000331-43.2013.8.26.015', '1000652-26.2017-X', '1001389_x0013_82.2014.8.26.0011', '1001714-82.2016-01', '1001979-31.2015.8.26.000', '1002291-06.2015', '1002303-58.2014.8.26.000', '1003497-11.2016.8.26.004', '1003497-11.2016.8.26.004', '1004605-05.2013.8.26.036', '1004654-24.2011', '1004917-78.2013.8.26.036', '1005816-12.2015', '1007417-83.2014.8.26.0', '1007585-24.2016.8.26.010', '1008571-07.2014', '1009077-28.2017', '1010581-58.2013', '1012988-81.2015', '1016137-12-2015-01', '1016315-58.2015', '1020444-09.2014.8.26.056', '1022387-91.2014.8.26.055', '1024512-70.2013', '1024738-76.2015', '10314-16.2011 - cód. 
7191', '1031478-15.2014', '1031645-61.8.26.0100', '1040129-36.2014', '1045764-61.2015', '1045978-23.2013', '1051437-69.2014', '1053564-46.2015', '1054631-77.2014', '1058724-83.2014.8.26.010', '1064382-25.2013', '1066864-43.2013', '1068654-28.2014', '1072883-65.2013', '1089219-13.2014.8.26.010', '1090251-82.2016', '1092882-67.2014', '110527-81.2014.8.26.0100', '1125855-07.2016', '164626-47.2011', '164626-47.2011', '2000889-13.2016.8.26.0', '2005 102305 7 ORD', '2005.542-63.2013.8.26.001', '2010.0007215-31', '2010.0007549-65', '2014.01.1.192579-8', '2015.14.1.002136-7', '2015.14.1.006175-6', '2015.14.1.008604-8', '2016.14.1.006604-4', '2016.16.1.000562-6', '2016.16.1.004832-4', '2016.16.1.007301-6', '2016.16.1.009376-6', '2016.16.1.011841-7', '2017.16.1.000001-6', '2025388-32/2014', '240654-18.2015', '2767-72.8.26.0654', '283212-73.2013', '284790-03.2015', '302459-35.2016', '4001449-28.2013.8.26.056', '4006551-81.2013', '4008248-79.2013', '465260-63.2014', '495508-80.2012', '5255449.64.2015.8.09.005', '583 00 2004 008628 9', '583 00 2005 069234 5', '583.07.2005.023123-8', '998.10.602.359', '999.11.606502-', 'kk0017042-122009.8.19.020']
# --- Fixture data shared by the validator tests below ---
# Brazilian lawsuit numbers in CNJ format (NNNNNNN-DD.AAAA.J.TR.OOOO).
well_formed_lawsuits_numbers = ['0000013-59.2013.8.05.0250', '0611925-60.2017.8.04.0001', '0004268-07.2016.8.06.0063',
                                '0005714-53.2008.8.18.0140']
malformed_lawsuits_numbers = ['0000023-59.2013.8.05.0250', '0611925-60.8888.8.04.0010', '7770000-07.2016.8.06.0063',
                              '0005714-53.2008.8.18.0000']
# CNPJ company-registration numbers; note the deliberate mix of str and int.
well_formed_cnpj_numbers = ['04.170.575/0001-03',
                            61198164000160, 58768284000140, 33448150000111, 8816067000100, 4540010000170,
                            4862600000110, 40303299000178, 48041735000190, '02340041000152', '09436686000132']
# Closed-set (enum-like) string choices used by the domain model.
customers = ["DIRECIONAL JUDICIAL", "DIRECIONAL EXTRAJUDICIAL", "MRV JUDICIAL", "MRV EXTRAJUDICIAL", "PRECON", "BTM",
             "SEMPRE EDITORA EXTRAJUDICIAL", "SANTO ANDRÉ JUDICIAL", "SANTO ANDRÉ EXTRAJUDICIAL", "KINEA JUDICIAL",
             "KINEA EXTRAJUDICIAL", "ITAU", "SEMPRE EDITORA JUDICIAL"]
negotiation_status = ["CONTATO INICIAL", "EM ANDAMENTO", "AVANÇADA", "QUITAÇÃO INTEGRAL", "RENEGOCIAÇÃO EFETIVADA",
                      "RENEGOCIAÇÃO QUITADA", "INADIMPLÊNCIA APÓS ACORDO", "RECUSA NÃO TEM CONDIÇÕES",
                      "RECUSA NÃO TEM INTERESSE"]
deal = ["SIM", "NÃO"]
deal_type = ["PARCELADO", "À VISTA"]
customers_status = ["ADIMPLENTE", "INADIMPLENTE"]
suspended_action = ["SUSPENSO", "ATIVO"]
customers_lawyer = ["SIM", "NÃO"]
customer_guarantee = ["SIM", "NÃO"]
court = ["SUPREMO TRIBUNAL FEDERAL", "TRIBUNAL SUPERIOR DO TRABALHO", "CONSELHO SUPERIOR DA JUSTIÇA DO TRABALHO",
         "SUPERIOR TRIBUNAL DE JUSTIÇA", "CONSELHO DA JUSTIÇA FEDERAL", "TRIBUNAL SUPERIOR ELEITORAL",
         "SUPERIOR TRIBUNAL MILITAR", "TRIBUNAL REGIONAL FEDERAL", "TRIBUNAL DE JUSTIÇA", "TRIBUNAL REGIONAL ELEITORAL",
         "TRIBUNAL DE JUSTIÇA MILITAR", "PROCON", "OAB", "PREFEITURA", "GOVERNO DO ESTADO", "ÓRGÃO PROFISSIONAL",
         "OUTROS"]
# Two-letter Brazilian state (UF) codes.
state = ["AC", "AL", "AP", "AM", "BA", "CE", "DF", "ES", "GO", "MA", "MT", "MS", "MG", "PA", "PB", "PR", "PE", "PI",
         "RJ", "RN", "RS", "RO", "RR", "SC", "SP", "SE", "TO"]
subpoena = ["SIM", "NÃO"]
pawn = ["SIM", "NÃO"]
patrimony_repossessed = ["VALOR EM CONTA", "VEÍCULO", "IMÓVEL", "DIREITOS AQUISITIVOS", "OUTROS"]
court_decision = ["NÃO", "PROCEDENTE", "PARCIALMENTE PROCEDENTE", "IMPROCEDENTE", "EXTINTO SEM JULGAMENTO DO MÉRITO",
                  "HOMOLOGADO ACORDO"]
appeal = ["SIM", "NÃO"]
res_judicata = ["SIM", "NÃO"]
# [input, expected-validity] pairs for the cellphone validator.
cellphoneValidatorsTests = [
    ["33991749686", True],
    ["XXX", False],
    ["33 9 9174 - 7498", True],
    ["(37)982159000", True],
    ["37J982159000", False],
    ["+(55)31991749686", True],
    ["", False]
]
# [input, expected-validity] pairs for the alphanumeric validator.
alphanumericValidatorsTests = [
    ["123456", True],
    ["XXX", True],
    ["XXX456", True],
    ["", False],
    ["....", False],
    ["asas.5464", False],
    ["", False]
]
class TestCPF:
    """Tests for validators_br.cpf (Brazilian individual taxpayer numbers)."""
    # One valid CPF plus variants that each violate exactly one rule.
    well_formed_cpf = '020.388.410-80'
    malformed_numeric_cpf = 2038841080  # int instead of str
    malformed_short_cpf = '20.388.410-80'
    malformed_long_cpf = '0020.388.410-80'
    malformed_mask_cpf = '02038841080'  # digits only, no punctuation mask
    malformed_digit_cpf = '020.388.410-00'  # wrong check digits
    malformed_equal_digit_cpf = '333.333.333-33'  # all-equal digits
    def test_cpf_formation(self):
        # A valid CPF is returned unchanged.
        assert self.well_formed_cpf == validators_br.cpf(self.well_formed_cpf)
    def test_empty_cpf(self):
        with pytest.raises(errors.EmptyValueError):
            validators_br.cpf('')
    def test_cpf_string_type(self):
        with pytest.raises(errors_br.DataTypeError):
            validators_br.cpf(self.malformed_numeric_cpf)
    def test_cpf_too_short(self):
        with pytest.raises(errors.MinimumLengthError):
            validators_br.cpf(self.malformed_short_cpf)
    def test_cpf_too_long(self):
        with pytest.raises(errors.MaximumLengthError):
            validators_br.cpf(self.malformed_long_cpf)
    def test_cpf_wrong_mask(self):
        with pytest.raises(errors_br.InvalidCpfMaskError):
            validators_br.cpf(self.malformed_mask_cpf)
    def test_cpf_digit(self):
        with pytest.raises(errors_br.InvalidCpfError):
            validators_br.cpf(self.malformed_digit_cpf)
    def test_cpf_equal_digit(self):
        # All-equal digits pass the checksum but must still be rejected.
        with pytest.raises(errors_br.InvalidCpfEqualError):
            validators_br.cpf(self.malformed_equal_digit_cpf)
class TestCNPJ:
    """Tests for validators_br.cnpj (Brazilian company registration numbers)."""
    # One valid CNPJ plus variants that each violate exactly one rule.
    well_formed_cnpj = '33.000.167/0001-01'
    malformed_numeric_cnpj = 33000167000101  # int instead of str
    malformed_short_cnpj = '33.000.167/001-01'
    malformed_long_cnpj = '033.000.167/0001-01'
    malformed_mask_cnpj = '33000167000101'  # digits only, no punctuation mask
    malformed_digit_cnpj = '33.000.167/0001-02'  # wrong check digits
    malformed_equal_digit_cnpj = '11.111.111/1111-11'  # all-equal digits
    def test_cnpj_formation(self):
        # A valid CNPJ is returned unchanged.
        assert self.well_formed_cnpj == validators_br.cnpj(self.well_formed_cnpj)
    def test_empty_cnpj(self):
        with pytest.raises(errors.EmptyValueError):
            validators_br.cnpj('')
    def test_cnpj_string_type(self):
        with pytest.raises(errors_br.DataTypeError):
            validators_br.cnpj(self.malformed_numeric_cnpj)
    def test_cnpj_too_short(self):
        with pytest.raises(errors.MinimumLengthError):
            validators_br.cnpj(self.malformed_short_cnpj)
    def test_cnpj_too_long(self):
        with pytest.raises(errors.MaximumLengthError):
            validators_br.cnpj(self.malformed_long_cnpj)
    def test_cnpj_wrong_mask(self):
        with pytest.raises(errors_br.InvalidCnpjMaskError):
            validators_br.cnpj(self.malformed_mask_cnpj)
    def test_cnpj_digit(self):
        with pytest.raises(errors_br.InvalidCnpjError):
            validators_br.cnpj(self.malformed_digit_cnpj)
    def test_cnpj_equal_digit(self):
        # All-equal digits pass the checksum but must still be rejected.
        with pytest.raises(errors_br.InvalidCnpjEqualError):
            validators_br.cnpj(self.malformed_equal_digit_cnpj)
@pytest.mark.parametrize('value, fails, allow_empty', [
    ([], True, False),
    (None, True, False),
    ('', True, False),
    (set(), True, False),
    ((), True, False),
    (['a'], True, False),
    ('a', True, False),
    (1, True, False),
    (set('a'), True, False),
    ('joao', True, False),
    ('joao carlos', False, False),
    ('joão carlos', False, False),
    ("joana d'arc", False, False),
    ("Joana D'arc", False, False),
    ("JOANA D'ARC", False, False),
    ('Catherine Zeta-Jones', False, False),
    ('joão paulo II', False, False),
])
def test_person_full_name(value, fails, allow_empty):
    """Test the person_full_name validator.

    Single words, non-strings and empty values fail; names with at least
    two words (including accents, apostrophes and hyphens) pass.
    """
    if not fails:
        validated = validators_br.person_full_name(value, allow_empty=allow_empty)
        if not value and allow_empty:
            assert validated is None
        elif value:
            assert validated is not None
    else:
        with pytest.raises((ValueError, TypeError)):
            validated = validators_br.person_full_name(value, allow_empty=allow_empty)
| 52.190722 | 3,134 | 0.659358 |
0486c127eb46f7a17b270de0daa55426d25b860e | 1,670 | py | Python | pincer/objects/message/emoji.py | MithicSpirit/Pincer | 3e5aee5bc228a77caac59e07299d54e558b7f39d | [
"MIT"
] | 1 | 2021-11-16T05:18:42.000Z | 2021-11-16T05:18:42.000Z | pincer/objects/message/emoji.py | Seanpm2001-Discord/Pincer | a2c045f85f44712f3257e5cc50b3acacbd1302f9 | [
"MIT"
] | 1 | 2021-11-11T17:22:43.000Z | 2021-11-11T17:22:43.000Z | pincer/objects/message/emoji.py | mCodingLLC/Pincer | 29445067b438e11647cda2ae8294b38f0b2fbac8 | [
"MIT"
] | null | null | null | # Copyright Pincer 2021-Present
# Full MIT License can be found in `LICENSE` at the project root.
from __future__ import annotations
from dataclasses import dataclass
from typing import TYPE_CHECKING
from ...utils.api_object import APIObject
from ...utils.types import MISSING
if TYPE_CHECKING:
from typing import Optional, List
from ..user.user import User
from ..guild.role import Role
from ...utils.types import APINullable
from ...utils.snowflake import Snowflake
@dataclass
class Emoji(APIObject):
    """Representation of an emoji in a class.
    Attributes
    ----------
    id: Optional[:class:`~pincer.utils.snowflake.Snowflake`]
        Emoji id
    name: Optional[:class:`str`]
        Emoji name
    animated: APINullable[:class:`bool`]
        Whether this emoji is animated
    available: APINullable[:class:`bool`]
        Whether this emoji can be used, may be false due to loss of Server
        Boosts
    managed: APINullable[:class:`bool`]
        Whether this emoji is managed
    require_colons: APINullable[:class:`bool`]
        Whether this emoji must be wrapped in colons
    roles: APINullable[List[:class:`~pincer.objects.guild.role.Role`]]
        Roles allowed to use this emoji
    user: APINullable[:class:`~pincer.objects.user.user.User`]
        User that created this emoji
    """
    # `name` has no default: it is always present in the API payload.
    name: Optional[str]
    # MISSING is the sentinel for fields the API may omit entirely
    # (distinct from an explicit null/None value).
    id: APINullable[Snowflake] = MISSING
    animated: APINullable[bool] = MISSING
    available: APINullable[bool] = MISSING
    managed: APINullable[bool] = MISSING
    require_colons: APINullable[bool] = MISSING
    roles: APINullable[List[Role]] = MISSING
    user: APINullable[User] = MISSING
| 30.363636 | 74 | 0.697006 |
7c1a430e83d9efbe5cecf9a742ab29e61b978d17 | 2,690 | py | Python | scripts/pylib/twister/scl.py | maxvankessel/zephyr | 769d91b922b736860244b22e25328d91d9a17657 | [
"Apache-2.0"
] | 6,224 | 2016-06-24T20:04:19.000Z | 2022-03-31T20:33:45.000Z | scripts/pylib/twister/scl.py | Conexiotechnologies/zephyr | fde24ac1f25d09eb9722ce4edc6e2d3f844b5bce | [
"Apache-2.0"
] | 32,027 | 2017-03-24T00:02:32.000Z | 2022-03-31T23:45:53.000Z | scripts/pylib/twister/scl.py | Conexiotechnologies/zephyr | fde24ac1f25d09eb9722ce4edc6e2d3f844b5bce | [
"Apache-2.0"
] | 4,374 | 2016-08-11T07:28:47.000Z | 2022-03-31T14:44:59.000Z | #! /usr/bin/python
#
# SPDX-License-Identifier: Apache-2.0
# Zephyr's Twister library
#
# pylint: disable=unused-import
#
# Set of code that other projects can also import to do things on
# Zephyr's sanity check testcases.
import logging
import yaml
try:
# Use the C LibYAML parser if available, rather than the Python parser.
# It's much faster.
from yaml import CLoader as Loader
from yaml import CSafeLoader as SafeLoader
from yaml import CDumper as Dumper
except ImportError:
from yaml import Loader, SafeLoader, Dumper
log = logging.getLogger("scl")
#
#
def yaml_load(filename):
    """
    Safely load a YAML document
    Follows recommendations from
    https://security.openstack.org/guidelines/dg_avoid-dangerous-input-parsing-libraries.html.
    :param str filename: filename to load
    :raises yaml.scanner.ScannerError: On YAML scan issues
    :raises: any other exception on file access errors
    :return: dictionary representing the YAML document
    """
    try:
        with open(filename, 'r') as f:
            # SafeLoader refuses arbitrary Python object construction.
            return yaml.load(f, Loader=SafeLoader)
    except yaml.scanner.ScannerError as e: # For errors parsing schema.yaml
        mark = e.problem_mark
        cmark = e.context_mark
        # Report file/line/column of both the problem and its context.
        log.error("%s:%d:%d: error: %s (note %s context @%s:%d:%d %s)",
                  mark.name, mark.line, mark.column, e.problem,
                  e.note, cmark.name, cmark.line, cmark.column, e.context)
        raise
# If pykwalify is installed, then the validate function will work --
# otherwise, it is a stub and we'd warn about it.
try:
    import pykwalify.core
    # Don't print error messages yourself, let us do it
    logging.getLogger("pykwalify.core").setLevel(50)
    def _yaml_validate(data, schema):
        # Validate `data` against `schema`; a falsy schema skips validation.
        if not schema:
            return
        c = pykwalify.core.Core(source_data=data, schema_data=schema)
        c.validate(raise_exception=True)
except ImportError as e:
    log.warning("can't import pykwalify; won't validate YAML (%s)", e)
    def _yaml_validate(data, schema):
        # pykwalify unavailable: validation is a deliberate no-op.
        pass
def yaml_load_verify(filename, schema):
    """
    Safely load a testcase/sample yaml document and validate it
    against the YAML schema, returning in case of success the YAML data.
    :param str filename: name of the file to load and process
    :param dict schema: loaded YAML schema (can load with :func:`yaml_load`)
    :raises yaml.scanner.ScannerError: on YAML parsing error
    :raises pykwalify.errors.SchemaError: on Schema violation error
    """
    # 'document.yaml' contains a single YAML document.
    y = yaml_load(filename)
    _yaml_validate(y, schema)
    return y
| 32.409639 | 94 | 0.688476 |
38d3957a421a2016185f669b27dba9414c7b2aa2 | 2,304 | py | Python | IPython/qt/console/tests/test_kill_ring.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | 1 | 2020-12-18T01:07:55.000Z | 2020-12-18T01:07:55.000Z | IPython/qt/console/tests/test_kill_ring.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/qt/console/tests/test_kill_ring.py | pyarnold/ipython | c4797f7f069d0a974ddfa1e4251c7550c809dba0 | [
"BSD-3-Clause-Clear"
] | null | null | null | # Standard library imports
import unittest
# System library imports
from IPython.external.qt import QtGui
# Local imports
from IPython.qt.console.kill_ring import KillRing, QtKillRing
import IPython.testing.decorators as dec
setup = dec.skip_file_no_x11(__name__)
class TestKillRing(unittest.TestCase):
    """Tests for the generic KillRing and the Qt-backed QtKillRing."""
    @classmethod
    def setUpClass(cls):
        """ Create the application for the test case.
        """
        # Reuse an existing QApplication if one is already running.
        cls._app = QtGui.QApplication.instance()
        if cls._app is None:
            cls._app = QtGui.QApplication([])
        cls._app.setQuitOnLastWindowClosed(False)
    @classmethod
    def tearDownClass(cls):
        """ Exit the application.
        """
        QtGui.QApplication.quit()
    def test_generic(self):
        """ Does the generic kill ring work?
        """
        ring = KillRing()
        # Empty ring yields nothing.
        self.assertTrue(ring.yank() is None)
        self.assertTrue(ring.rotate() is None)
        ring.kill('foo')
        self.assertEqual(ring.yank(), 'foo')
        self.assertTrue(ring.rotate() is None)
        self.assertEqual(ring.yank(), 'foo')
        ring.kill('bar')
        # Most recent kill is yanked first; rotate walks back in history.
        self.assertEqual(ring.yank(), 'bar')
        self.assertEqual(ring.rotate(), 'foo')
        ring.clear()
        self.assertTrue(ring.yank() is None)
        self.assertTrue(ring.rotate() is None)
    def test_qt_basic(self):
        """ Does the Qt kill ring work?
        """
        text_edit = QtGui.QPlainTextEdit()
        ring = QtKillRing(text_edit)
        ring.kill('foo')
        ring.kill('bar')
        ring.yank()
        ring.rotate()
        ring.yank()
        self.assertEqual(text_edit.toPlainText(), 'foobar')
        text_edit.clear()
        ring.kill('baz')
        ring.yank()
        # Rotation wraps around the ring contents.
        ring.rotate()
        ring.rotate()
        ring.rotate()
        self.assertEqual(text_edit.toPlainText(), 'foo')
    def test_qt_cursor(self):
        """ Does the Qt kill ring maintain state with cursor movement?
        """
        text_edit = QtGui.QPlainTextEdit()
        ring = QtKillRing(text_edit)
        ring.kill('foo')
        ring.kill('bar')
        ring.yank()
        # Moving the cursor breaks the yank sequence, so rotate replaces
        # rather than cycles the previous yank.
        text_edit.moveCursor(QtGui.QTextCursor.Left)
        ring.rotate()
        self.assertEqual(text_edit.toPlainText(), 'bar')
if __name__ == '__main__':
    # Allow running this test module directly through the nose runner.
    import nose
    nose.main()
| 25.88764 | 70 | 0.601563 |
984fed1da8014c70d629913fac59e209c0456bdf | 7,604 | py | Python | dex-net/apps/generate-dataset-canny.py | qingchenkanlu/PointNetGPD | 636bcd1e6809c83baff7868df3d65502d4864a75 | [
"MIT"
] | 1 | 2020-04-14T16:09:59.000Z | 2020-04-14T16:09:59.000Z | dex-net/apps/generate-dataset-canny.py | qingchenkanlu/PointNetGPD | 636bcd1e6809c83baff7868df3d65502d4864a75 | [
"MIT"
] | null | null | null | dex-net/apps/generate-dataset-canny.py | qingchenkanlu/PointNetGPD | 636bcd1e6809c83baff7868df3d65502d4864a75 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author : Hongzhuo Liang
# E-mail : liang@informatik.uni-hamburg.de
# Description:
# Date : 20/05/2018 2:45 PM
# File Name : generate-dataset-canny.py
import numpy as np
import sys
import pickle
from dexnet.grasping.quality import PointGraspMetrics3D
from dexnet.grasping import GaussianGraspSampler, AntipodalGraspSampler, UniformGraspSampler, GpgGraspSampler
from dexnet.grasping import RobotGripper, GraspableObject3D, GraspQualityConfigFactory, PointGraspSampler
import dexnet
from autolab_core import YamlConfig
from meshpy.obj_file import ObjFile
from meshpy.sdf_file import SdfFile
import os
import multiprocessing
import matplotlib.pyplot as plt
plt.switch_backend('agg') # for the convenient of run on remote computer
def get_file_name(file_dir_):
    """Return the sorted list of immediate subdirectories of ``file_dir_``.

    Depth is judged by counting '/' separators: only paths with exactly one
    more separator than ``file_dir_`` itself (first-level children) are kept.
    """
    target_depth = file_dir_.count('/') + 1
    subdirs = []
    for root, _dirs, _files in os.walk(file_dir_):
        if root.count('/') == target_depth:
            subdirs.append(root)
    return sorted(subdirs)
def do_job(i):
    """Collect grasps for object ``i`` with 50 parallel workers, then dump them.

    Results go to ./generated_grasps/<prefix>_<object>_<count> both as a
    pickle of (grasp, friction, canny-score) tuples and as an .npy array of
    [grasp_config..., friction_score, canny_score] rows.
    """
    # NOTE(review): len(home_dir) + 35 presumably strips the fixed
    # "/dataset/ycb_meshes_google/objects/" prefix -- confirm against
    # file_dir in __main__.
    object_name = file_list_all[i][len(home_dir) + 35:]
    # Manager list so grasps can be appended from child processes.
    good_grasp = multiprocessing.Manager().list()
    p_set = [multiprocessing.Process(target=worker, args=(i, 100, 20, good_grasp)) for _ in
             range(50)]  # grasp_amount per friction: 20*40
    [p.start() for p in p_set]
    [p.join() for p in p_set]
    good_grasp = list(good_grasp)
    good_grasp_file_name = "./generated_grasps/{}_{}_{}".format(filename_prefix, str(object_name), str(len(good_grasp)))
    with open(good_grasp_file_name + '.pickle', 'wb') as f:
        pickle.dump(good_grasp, f)
    tmp = []
    for grasp in good_grasp:
        grasp_config = grasp[0].configuration
        score_friction = grasp[1]
        score_canny = grasp[2]
        tmp.append(np.concatenate([grasp_config, [score_friction, score_canny]]))
    np.save(good_grasp_file_name + '.npy', np.array(tmp))
    print("finished job ", object_name)
def worker(i, sample_nums, grasp_amount, good_grasp):
    """Sample grasps for object ``i`` until every friction level has enough.

    Appends (grasp, friction_coef, ferrari_canny_score) tuples to the shared
    ``good_grasp`` list; ``sample_nums`` grasps are drawn per round and
    ``grasp_amount`` grasps are required per friction coefficient.
    """
    object_name = file_list_all[i][len(home_dir) + 35:]
    print('a worker of task {} start'.format(object_name))
    yaml_config = YamlConfig(home_dir + "/code/grasp-pointnet/dex-net/test/config.yaml")
    gripper_name = 'robotiq_85'
    gripper = RobotGripper.load(gripper_name, home_dir + "/code/grasp-pointnet/dex-net/data/grippers")
    # Hard-coded sampler choice; the other branches are kept for easy switching.
    grasp_sample_method = "antipodal"
    if grasp_sample_method == "uniform":
        ags = UniformGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gaussian":
        ags = GaussianGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "antipodal":
        ags = AntipodalGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "gpg":
        ags = GpgGraspSampler(gripper, yaml_config)
    elif grasp_sample_method == "point":
        ags = PointGraspSampler(gripper, yaml_config)
    else:
        raise NameError("Can't support this sampler")
    print("Log: do job", i)
    if os.path.exists(str(file_list_all[i]) + "/google_512k/nontextured.obj"):
        of = ObjFile(str(file_list_all[i]) + "/google_512k/nontextured.obj")
        sf = SdfFile(str(file_list_all[i]) + "/google_512k/nontextured.sdf")
    else:
        print("can't find any obj or sdf file!")
        raise NameError("can't find any obj or sdf file!")
    mesh = of.read()
    sdf = sf.read()
    obj = GraspableObject3D(sdf, mesh)
    print("Log: opened object", i + 1, object_name)
    force_closure_quality_config = {}
    canny_quality_config = {}
    # Friction coefficients: coarse steps from 2.0 down, then finer near 0.5.
    fc_list_sub1 = np.arange(2.0, 0.75, -0.4)
    fc_list_sub2 = np.arange(0.5, 0.36, -0.05)
    fc_list = np.concatenate([fc_list_sub1, fc_list_sub2])
    for value_fc in fc_list:
        value_fc = round(value_fc, 2)
        yaml_config['metrics']['force_closure']['friction_coef'] = value_fc
        yaml_config['metrics']['robust_ferrari_canny']['friction_coef'] = value_fc
        force_closure_quality_config[value_fc] = GraspQualityConfigFactory.create_config(
            yaml_config['metrics']['force_closure'])
        canny_quality_config[value_fc] = GraspQualityConfigFactory.create_config(
            yaml_config['metrics']['robust_ferrari_canny'])
    good_count_perfect = np.zeros(len(fc_list))
    count = 0
    minimum_grasp_per_fc = grasp_amount
    # Keep sampling until every friction bucket has enough grasps.
    while np.sum(good_count_perfect < minimum_grasp_per_fc) != 0:
        grasps = ags.generate_grasps(obj, target_num_grasps=sample_nums, grasp_gen_mult=10,
                                     vis=False, random_approach_angle=True)
        count += len(grasps)
        for j in grasps:
            tmp, is_force_closure = False, False
            # Walk friction values high->low; a grasp is bucketed at the last
            # (smallest) friction for which it is still force-closure.
            for ind_, value_fc in enumerate(fc_list):
                value_fc = round(value_fc, 2)
                tmp = is_force_closure
                is_force_closure = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                     force_closure_quality_config[value_fc], vis=False)
                if tmp and not is_force_closure:
                    # Transition closure->no-closure: score at previous friction.
                    if good_count_perfect[ind_ - 1] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                          canny_quality_config[
                                                                              round(fc_list[ind_ - 1], 2)],
                                                                          vis=False)
                        good_grasp.append((j, round(fc_list[ind_ - 1], 2), canny_quality))
                        good_count_perfect[ind_ - 1] += 1
                    break
                elif is_force_closure and value_fc == fc_list[-1]:
                    # Still force-closure at the smallest friction value.
                    if good_count_perfect[ind_] < minimum_grasp_per_fc:
                        canny_quality = PointGraspMetrics3D.grasp_quality(j, obj,
                                                                          canny_quality_config[value_fc], vis=False)
                        good_grasp.append((j, value_fc, canny_quality))
                        good_count_perfect[ind_] += 1
                    break
        print('Object:{} GoodGrasp:{}'.format(object_name, good_count_perfect))
    object_name_len = len(object_name)
    object_name_ = str(object_name) + " " * (25 - object_name_len)
    if count == 0:
        good_grasp_rate = 0
    else:
        good_grasp_rate = len(good_grasp) / count
    print('Gripper:{} Object:{} Rate:{:.4f} {}/{}'.
          format(gripper_name, object_name_, good_grasp_rate, len(good_grasp), count))
if __name__ == '__main__':
    # Optional CLI argument: prefix for the generated output file names.
    if len(sys.argv) > 1:
        filename_prefix = sys.argv[1]
    else:
        filename_prefix = "default"
    home_dir = os.environ['HOME']
    file_dir = home_dir + "/dataset/ycb_meshes_google/objects"
    file_list_all = get_file_name(file_dir)
    object_numbers = file_list_all.__len__()
    job_list = np.arange(object_numbers)
    job_list = list(job_list)
    pool_size = 1 # number of jobs did at same time
    assert (pool_size <= len(job_list))
    # Initialize pool
    pool = []
    for _ in range(pool_size):
        job_i = job_list.pop(0)
        pool.append(multiprocessing.Process(target=do_job, args=(job_i,)))
    [p.start() for p in pool]
    # refill
    # NOTE(review): this is a busy-wait loop (no sleep); it spins a CPU core
    # while polling workers for completion.
    while len(job_list) > 0:
        for ind, p in enumerate(pool):
            if not p.is_alive():
                # Replace the finished worker with the next queued job.
                pool.pop(ind)
                job_i = job_list.pop(0)
                p = multiprocessing.Process(target=do_job, args=(job_i,))
                p.start()
                pool.append(p)
                break
    print('All job done.')
| 43.204545 | 120 | 0.621515 |
30a2e88a1fbb60eea5f999d0d86971889b711209 | 1,243 | py | Python | shell/redirect.py | utep-cs-systems-courses/os-shell-jrlopez14 | c109f6724947571aa8fd00200a8941f6c74f0b5d | [
"BSD-3-Clause"
] | null | null | null | shell/redirect.py | utep-cs-systems-courses/os-shell-jrlopez14 | c109f6724947571aa8fd00200a8941f6c74f0b5d | [
"BSD-3-Clause"
] | null | null | null | shell/redirect.py | utep-cs-systems-courses/os-shell-jrlopez14 | c109f6724947571aa8fd00200a8941f6c74f0b5d | [
"BSD-3-Clause"
] | 1 | 2021-03-01T05:09:42.000Z | 2021-03-01T05:09:42.000Z | #! /usr/bin/env python3
import os
# Redirects command output to specified location.
def output_redirect(args):
    """Redirect stdout (fd 1) to the file named after a single '>' token.

    The '>' token and the following filename are removed from ``args``
    in place when the redirection is applied.

    Returns:
        True when there is no redirection or it was applied successfully;
        False when the command is malformed (more than one '>'), in which
        case ``args`` is left untouched.
    """
    if '>' not in args:
        return True
    # Cannot have multiple output redirections in command; reject before
    # mutating args so the caller still sees the original command.
    if args.count('>') > 1:
        return False
    goes_into_index = args.index('>')
    # Removes the redirection part of command ('>' and the filename).
    args.pop(goes_into_index)
    output_file = args.pop(goes_into_index)
    # Closing fd 1 frees the lowest descriptor, so os.open() reuses it,
    # making the file the new stdout; keep it inheritable for exec'd children.
    os.close(1)
    os.open(output_file, os.O_CREAT | os.O_WRONLY)
    os.set_inheritable(1, True)
    return True
# Redirects command input to specified location.
# If True, redirections were handled properly, if any.
def input_redirect(args):
    """Handle a single '<' input redirection in *args*, mutating it in place.

    Strips the '<' token and its filename from ``args`` and rebinds file
    descriptor 0 (stdin) to that file.

    Returns True when redirections, if any, were handled properly; False
    when the command contains more than one input redirection.
    """
    if '<' not in args:
        # Nothing to redirect.
        return True
    position = args.index('<')
    # Drop the operator and remember which file feeds stdin.
    args.pop(position)
    source_file = args.pop(position)
    # More than one input redirection is not allowed.
    if '<' in args:
        return False
    # os.open hands back the lowest free descriptor, so closing stdin
    # first guarantees the file lands on fd 0.
    os.close(0)
    os.open(source_file, os.O_RDONLY)
    os.set_inheritable(0, True)
    return True
| 30.317073 | 62 | 0.612228 |
78cab8e9572a918d95f93caa9c3e3a72ec6fd42f | 10,853 | py | Python | tests/theano/test_relations.py | josephwillard/symbolic-pymc | 7bef08dd572c3ddc32ddc8e8e3c0b1809b4ce654 | [
"Apache-2.0"
] | 59 | 2019-02-16T21:07:48.000Z | 2022-03-09T01:01:45.000Z | tests/theano/test_relations.py | josephwillard/symbolic-pymc | 7bef08dd572c3ddc32ddc8e8e3c0b1809b4ce654 | [
"Apache-2.0"
] | 56 | 2019-02-20T09:06:04.000Z | 2021-01-08T21:22:23.000Z | tests/theano/test_relations.py | josephwillard/symbolic-pymc | 7bef08dd572c3ddc32ddc8e8e3c0b1809b4ce654 | [
"Apache-2.0"
] | 9 | 2019-02-22T06:22:31.000Z | 2021-07-05T10:05:35.000Z | import pytest
import numpy as np
import theano
import theano.tensor as tt
from functools import partial
from unification import var
from etuples import etuple, etuplize
from kanren import run, eq
from kanren.core import lall
from kanren.graph import reduceo, walko, applyo
from symbolic_pymc.theano.meta import mt
from symbolic_pymc.theano.opt import eval_and_reify_meta
from symbolic_pymc.theano.random_variables import observed, NormalRV, HalfCauchyRV, MvNormalRV
from symbolic_pymc.relations.theano import non_obs_walko
from symbolic_pymc.relations.theano.conjugates import conjugate
from symbolic_pymc.relations.theano.distributions import scale_loc_transform, constant_neq
from symbolic_pymc.relations.theano.linalg import normal_normal_regression, normal_qr_transform
def test_constant_neq():
    """Check that `constant_neq` fails for a matching constant and succeeds
    for a non-matching one.

    NOTE: goal ordering matters here -- the commented-out cases below show
    that `constant_neq` only behaves as desired when the `eq` goal has
    already ground `q_lv`; it is not (yet) a true constraint.
    """
    q_lv = var()
    # `q_lv` unified with 1 must be rejected by `constant_neq(..., 1.0)`.
    res = run(0, q_lv, eq(q_lv, mt(1)), constant_neq(q_lv, np.array(1.0)))
    assert not res
    # TODO: If `constant_neq` was a true constraint, this would work.
    # res = run(0, q_lv, constant_neq(q_lv, np.array(1.0)), eq(q_lv, mt(1)))
    # assert not res
    # TODO: If `constant_neq` was a true constraint, this would work.
    # res = run(0, q_lv, constant_neq(q_lv, np.array(1.0)), eq(q_lv, mt(2)))
    # assert res == (mt(2),)
    # `q_lv` unified with 2 passes the disequality against 1.0.
    res = run(0, q_lv, eq(q_lv, mt(2)), constant_neq(q_lv, np.array(1.0)))
    assert res == (mt(2),)
def test_scale_loc_transform():
    """Apply `scale_loc_transform` to a radon-style hierarchical model and
    verify every `NormalRV(mu, sigma)` is rewritten to the non-centered form
    ``mu + NormalRV(0, 1) * sigma``.

    NOTE(review): all RVs share one `rand_state`, so the construction order
    below is significant; do not reorder these statements.
    """
    tt.config.compute_test_value = "ignore"
    rand_state = theano.shared(np.random.RandomState())
    mu_a = NormalRV(0.0, 100 ** 2, name="mu_a", rng=rand_state)
    sigma_a = HalfCauchyRV(5, name="sigma_a", rng=rand_state)
    mu_b = NormalRV(0.0, 100 ** 2, name="mu_b", rng=rand_state)
    sigma_b = HalfCauchyRV(5, name="sigma_b", rng=rand_state)
    county_idx = np.r_[1, 1, 2, 3]
    # We want the following for a, b:
    # N(m, S) -> m + N(0, 1) * S
    a = NormalRV(mu_a, sigma_a, size=(len(county_idx),), name="a", rng=rand_state)
    b = NormalRV(mu_b, sigma_b, size=(len(county_idx),), name="b", rng=rand_state)
    radon_est = a[county_idx] + b[county_idx] * 7
    eps = HalfCauchyRV(5, name="eps", rng=rand_state)
    radon_like = NormalRV(radon_est, eps, name="radon_like", rng=rand_state)
    radon_like_rv = observed(tt.as_tensor_variable(np.r_[1.0, 2.0, 3.0, 4.0]), radon_like)
    # Run the relational rewrite over every non-observed subgraph.
    q_lv = var()
    (expr_graph,) = run(
        1, q_lv, non_obs_walko(partial(reduceo, scale_loc_transform), radon_like_rv, q_lv)
    )
    radon_like_rv_opt = expr_graph.reify()
    assert radon_like_rv_opt.owner.op == observed
    radon_like_opt = radon_like_rv_opt.owner.inputs[1]
    radon_est_opt = radon_like_opt.owner.inputs[0]
    # These should now be `tt.add(mu_*, ...)` outputs.
    a_opt = radon_est_opt.owner.inputs[0].owner.inputs[0]
    b_opt = radon_est_opt.owner.inputs[1].owner.inputs[0].owner.inputs[0]
    # Make sure NormalRV gets replaced with an addition
    assert a_opt.owner.op == tt.add
    assert b_opt.owner.op == tt.add
    # Make sure the first term in the addition is the old NormalRV mean
    mu_a_opt = a_opt.owner.inputs[0].owner.inputs[0]
    assert "mu_a" == mu_a_opt.name == mu_a.name
    mu_b_opt = b_opt.owner.inputs[0].owner.inputs[0]
    assert "mu_b" == mu_b_opt.name == mu_b.name
    # Make sure the second term in the addition is the standard NormalRV times
    # the old std. dev.
    assert a_opt.owner.inputs[1].owner.op == tt.mul
    assert b_opt.owner.inputs[1].owner.op == tt.mul
    sigma_a_opt = a_opt.owner.inputs[1].owner.inputs[0].owner.inputs[0]
    assert sigma_a_opt.owner.op == sigma_a.owner.op
    sigma_b_opt = b_opt.owner.inputs[1].owner.inputs[0].owner.inputs[0]
    assert sigma_b_opt.owner.op == sigma_b.owner.op
    # The remaining factor must be a standard normal: NormalRV(0, 1).
    a_std_norm_opt = a_opt.owner.inputs[1].owner.inputs[1]
    assert a_std_norm_opt.owner.op == NormalRV
    assert a_std_norm_opt.owner.inputs[0].data == 0.0
    assert a_std_norm_opt.owner.inputs[1].data == 1.0
    b_std_norm_opt = b_opt.owner.inputs[1].owner.inputs[1]
    assert b_std_norm_opt.owner.op == NormalRV
    assert b_std_norm_opt.owner.inputs[0].data == 0.0
    assert b_std_norm_opt.owner.inputs[1].data == 1.0
def test_mvnormal_conjugate():
    """Test that we can produce the closed-form distribution for the conjugate
    multivariate normal-regression with normal-prior model.
    """
    # import symbolic_pymc.theano.meta as tm
    #
    # tm.load_dispatcher()
    # Disable the C compiler and test-value computation for this graph.
    tt.config.cxx = ""
    tt.config.compute_test_value = "ignore"
    a_tt = tt.vector("a")
    R_tt = tt.matrix("R")
    F_t_tt = tt.matrix("F")
    V_tt = tt.matrix("V")
    # Concrete test values for the prior mean/cov, design matrix, and noise cov.
    a_tt.tag.test_value = np.r_[1.0, 0.0]
    R_tt.tag.test_value = np.diag([10.0, 10.0])
    F_t_tt.tag.test_value = np.c_[-2.0, 1.0]
    V_tt.tag.test_value = np.diag([0.5])
    beta_rv = MvNormalRV(a_tt, R_tt, name="\\beta")
    E_y_rv = F_t_tt.dot(beta_rv)
    Y_rv = MvNormalRV(E_y_rv, V_tt, name="Y")
    y_tt = tt.as_tensor_variable(np.r_[-3.0])
    y_tt.name = "y"
    Y_obs = observed(y_tt, Y_rv)
    # Walk the graph and apply the conjugate-update relation.
    q_lv = var()
    (expr_graph,) = run(1, q_lv, walko(conjugate, Y_obs, q_lv))
    fgraph_opt = expr_graph.eval_obj
    fgraph_opt_tt = fgraph_opt.reify()
    # Check that the SSE has decreased from prior to posterior.
    # TODO: Use a better test.
    beta_prior_mean_val = a_tt.tag.test_value
    F_val = F_t_tt.tag.test_value
    beta_post_mean_val = fgraph_opt_tt.owner.inputs[0].tag.test_value
    priorp_err = np.square(y_tt.data - F_val.dot(beta_prior_mean_val)).sum()
    postp_err = np.square(y_tt.data - F_val.dot(beta_post_mean_val)).sum()
    # First, make sure the prior and posterior means are simply not equal.
    with pytest.raises(AssertionError):
        np.testing.assert_array_equal(priorp_err, postp_err)
    # Now, make sure there's a decrease (relative to the observed point).
    np.testing.assert_array_less(postp_err, priorp_err)
@pytest.mark.xfail(strict=True)
def test_normal_normal_regression():
    """Exercise `normal_normal_regression` in both directions: matching an
    existing `Y = X . beta` model, and constructing `Y` from a given `X` and
    `beta`.

    Marked xfail (strict): see the TODO below -- non-op parameters such as
    names are not covered by `operator`/`car`, so the match assertions fail.
    NOTE(review): the seeded RNG draws below are order-dependent; keep the
    statement order intact.
    """
    tt.config.compute_test_value = "ignore"
    theano.config.cxx = ""
    np.random.seed(9283)
    N = 10
    M = 3
    a_tt = tt.vector("a")
    R_tt = tt.vector("R")
    X_tt = tt.matrix("X")
    V_tt = tt.vector("V")
    a_tt.tag.test_value = np.random.normal(size=M)
    R_tt.tag.test_value = np.abs(np.random.normal(size=M))
    # Design matrix: intercept, x, and x^2 columns.
    X = np.random.normal(10, 1, size=N)
    X = np.c_[np.ones(10), X, X * X]
    X_tt.tag.test_value = X
    V_tt.tag.test_value = np.ones(N)
    beta_rv = NormalRV(a_tt, R_tt, name="\\beta")
    E_y_rv = X_tt.dot(beta_rv)
    E_y_rv.name = "E_y"
    Y_rv = NormalRV(E_y_rv, V_tt, name="Y")
    y_tt = tt.as_tensor_variable(Y_rv.tag.test_value)
    y_tt.name = "y"
    y_obs_rv = observed(y_tt, Y_rv)
    y_obs_rv.name = "y_obs"
    #
    # Use the relation with identify/match `Y`, `X` and `beta`.
    #
    y_args_tail_lv, b_args_tail_lv = var(), var()
    beta_lv = var()
    y_args_lv, y_lv, Y_lv, X_lv = var(), var(), var(), var()
    (res,) = run(
        1,
        (beta_lv, y_args_tail_lv, b_args_tail_lv),
        applyo(mt.observed, y_args_lv, y_obs_rv),
        eq(y_args_lv, (y_lv, Y_lv)),
        normal_normal_regression(Y_lv, X_lv, beta_lv, y_args_tail_lv, b_args_tail_lv),
    )
    # TODO FIXME: This would work if non-op parameters (e.g. names) were covered by
    # `operator`/`car`. See `TheanoMetaOperator`.
    assert res[0].eval_obj.obj == beta_rv
    assert res[0] == etuplize(beta_rv)
    assert res[1] == etuplize(Y_rv)[2:]
    assert res[2] == etuplize(beta_rv)[1:]
    #
    # Use the relation with to produce `Y` from given `X` and `beta`.
    #
    X_new_mt = mt(tt.eye(N, M))
    beta_new_mt = mt(NormalRV(0, 1, size=M))
    Y_args_cdr_mt = etuplize(Y_rv)[2:]
    Y_lv = var()
    (res,) = run(1, Y_lv, normal_normal_regression(Y_lv, X_new_mt, beta_new_mt, Y_args_cdr_mt))
    Y_out_mt = res.eval_obj
    # The expected result: NormalRV(X_new . beta_new, <original tail args>).
    Y_new_mt = etuple(mt.NormalRV, mt.dot(X_new_mt, beta_new_mt)) + Y_args_cdr_mt
    Y_new_mt = Y_new_mt.eval_obj
    assert Y_out_mt == Y_new_mt
@pytest.mark.xfail(strict=True)
def test_normal_qr_transform():
    """Apply `normal_qr_transform` to a normal regression model and check the
    resulting QR-reparameterized graph: the new coefficient is `inv(R)` from
    the QR decomposition of `X`, `beta_tilde` is standard normal, and the new
    `Y` mean uses `Q`.

    Marked xfail (strict).  NOTE(review): seeded RNG draws below are
    order-dependent; keep the statement order intact.
    """
    np.random.seed(9283)
    N = 10
    M = 3
    X_tt = tt.matrix("X")
    # Design matrix: intercept, x, and x^2 columns.
    X = np.random.normal(10, 1, size=N)
    X = np.c_[np.ones(10), X, X * X]
    X_tt.tag.test_value = X
    V_tt = tt.vector("V")
    V_tt.tag.test_value = np.ones(N)
    a_tt = tt.vector("a")
    R_tt = tt.vector("R")
    a_tt.tag.test_value = np.random.normal(size=M)
    R_tt.tag.test_value = np.abs(np.random.normal(size=M))
    beta_rv = NormalRV(a_tt, R_tt, name="\\beta")
    E_y_rv = X_tt.dot(beta_rv)
    E_y_rv.name = "E_y"
    Y_rv = NormalRV(E_y_rv, V_tt, name="Y")
    y_tt = tt.as_tensor_variable(Y_rv.tag.test_value)
    y_tt.name = "y"
    y_obs_rv = observed(y_tt, Y_rv)
    y_obs_rv.name = "y_obs"
    # The relation yields old-node/new-node pairs; reify both sides.
    (res,) = run(1, var("q"), normal_qr_transform(y_obs_rv, var("q")))
    new_node = {eval_and_reify_meta(k): eval_and_reify_meta(v) for k, v in res}
    # Make sure the old-to-new `beta` conversion is correct.
    t_Q, t_R = np.linalg.qr(X)
    Coef_new_value = np.linalg.inv(t_R)
    np.testing.assert_array_almost_equal(
        Coef_new_value, new_node[beta_rv].owner.inputs[0].tag.test_value
    )
    # Make sure the new `beta_tilde` has the right standard normal distribution
    # parameters.
    beta_tilde_node = new_node[beta_rv].owner.inputs[1]
    np.testing.assert_array_almost_equal(
        np.r_[0.0, 0.0, 0.0], beta_tilde_node.owner.inputs[0].tag.test_value
    )
    np.testing.assert_array_almost_equal(
        np.r_[1.0, 1.0, 1.0], beta_tilde_node.owner.inputs[1].tag.test_value
    )
    # The transformed `Y` must be built from `Q` and the same `beta_tilde`.
    Y_new = new_node[y_obs_rv].owner.inputs[1]
    assert Y_new.owner.inputs[0].owner.inputs[1] == beta_tilde_node
    np.testing.assert_array_almost_equal(t_Q, Y_new.owner.inputs[0].owner.inputs[0].tag.test_value)
def test_basic_scan_transform():
    """Relationally rewrite the body of a Theano `scan`: replace `2 * x[t-1]`
    with `5 * x[t-1]` and confirm the new scan computes powers of 5 instead
    of powers of 2.
    """
    def f_pow2(x_tm1):
        # One scan step: double the previous state.
        return 2 * x_tm1
    state = theano.tensor.scalar("state")
    n_steps = theano.tensor.iscalar("nsteps")
    output, updates = theano.scan(
        f_pow2, [], state, [], n_steps=n_steps, truncate_gradient=-1, go_backwards=False
    )
    # Sanity check on the original graph: powers of 2 starting from 1.
    assert np.array_equal(output.eval({state: 1.0, n_steps: 4}), np.r_[2.0, 4.0, 8.0, 16.0])
    def mul_trans(in_expr, out_expr):
        """Equate `2 * x` with `5 * x` in a Theano `scan`.
        I.e. from left-to-right, replace `2 * x[t-1]` with `5 * x[t-1]`.
        """
        arg_lv = var()
        inputs_lv, info_lv = var(), var()
        # Pattern for the input scan and its rewritten counterpart; only the
        # multiplier in the inner graph differs.
        in_scan_lv = mt.Scan(inputs_lv, [mt.mul(2, arg_lv)], info_lv)
        out_scan_lv = mt.Scan(inputs_lv, [mt.mul(5, arg_lv)], info_lv)
        return lall(eq(in_expr, in_scan_lv), eq(out_expr, out_scan_lv))
    q_lv = var()
    (output_mt,) = run(1, q_lv, walko(partial(reduceo, mul_trans), output, q_lv))
    output_new = output_mt.eval_obj.reify()
    assert output_new != output
    # The rewritten scan now yields powers of 5.
    assert np.array_equal(output_new.eval({state: 1.0, n_steps: 4}), np.r_[5.0, 25.0, 125.0, 625.0])
| 33.809969 | 100 | 0.668018 |
a6d983a2c74575f160470d1e0c188e34fbcfbc0c | 74,892 | py | Python | kubernetes/client/apis/coordination_v1beta1_api.py | iamneha/python | 5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0 | [
"Apache-2.0"
] | 1 | 2019-02-17T15:28:39.000Z | 2019-02-17T15:28:39.000Z | kubernetes/client/apis/coordination_v1beta1_api.py | iamneha/python | 5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0 | [
"Apache-2.0"
] | null | null | null | kubernetes/client/apis/coordination_v1beta1_api.py | iamneha/python | 5b208a1a49a8d6f8bbab28bcc226b9ef793bcbd0 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class CoordinationV1beta1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_namespaced_lease(self, namespace, body, **kwargs):
    """create a Lease

    Convenience wrapper around `create_namespaced_lease_with_http_info`
    that returns only the deserialized response body.  Pass
    ``async_req=True`` to receive the request thread instead.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1Lease body: (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, modifications are not persisted ('All' processes every dry run stage).
    :return: V1beta1Lease
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already honours async_req, so a single
    # delegation covers both the synchronous and asynchronous paths.
    return self.create_namespaced_lease_with_http_info(namespace, body, **kwargs)
def create_namespaced_lease_with_http_info(self, namespace, body, **kwargs):
    """create a Lease

    Builds and dispatches the POST request for a Lease, returning the full
    HTTP response information.  Pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1Lease body: (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, modifications are not persisted ('All' processes every dry run stage).
    :return: V1beta1Lease
    """
    # Every keyword the caller may legally pass; anything else is rejected.
    all_params = [
        'namespace', 'body', 'include_uninitialized', 'pretty', 'dry_run',
        'async_req', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_lease" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_lease`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_lease`")

    path_params = {'namespace': params['namespace']}

    # Query parameters, in the order the generated client emits them.
    query_params = [
        (api_name, params[py_name])
        for api_name, py_name in (
            ('includeUninitialized', 'include_uninitialized'),
            ('pretty', 'pretty'),
            ('dryRun', 'dry_run'),
        )
        if py_name in params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        '/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases', 'POST',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1Lease',
        auth_settings=['BearerToken'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_collection_namespaced_lease(self, namespace, **kwargs):
    """delete collection of Lease

    Convenience wrapper around
    `delete_collection_namespaced_lease_with_http_info` that returns only
    the deserialized response body.  Pass ``async_req=True`` to receive
    the request thread instead.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str _continue: continuation token from a previous list call.
    :param str field_selector: restrict returned objects by their fields.
    :param str label_selector: restrict returned objects by their labels.
    :param int limit: maximum number of responses to return for a list call.
    :param str resource_version: show changes after this resource version.
    :param int timeout_seconds: timeout for the list/watch call.
    :param bool watch: watch for changes and return them as a stream.
    :return: V1Status
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already honours async_req, so a single
    # delegation covers both the synchronous and asynchronous paths.
    return self.delete_collection_namespaced_lease_with_http_info(namespace, **kwargs)
def delete_collection_namespaced_lease_with_http_info(self, namespace, **kwargs):
    """delete collection of Lease

    Builds and dispatches the collection DELETE request for Leases in a
    namespace, returning the full HTTP response information.  Pass
    ``async_req=True`` to receive the request thread instead.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str _continue: continuation token from a previous list call.
    :param str field_selector: restrict returned objects by their fields.
    :param str label_selector: restrict returned objects by their labels.
    :param int limit: maximum number of responses to return for a list call.
    :param str resource_version: show changes after this resource version.
    :param int timeout_seconds: timeout for the list/watch call.
    :param bool watch: watch for changes and return them as a stream.
    :return: V1Status
    """
    # Every keyword the caller may legally pass; anything else is rejected.
    all_params = [
        'namespace', 'include_uninitialized', 'pretty', '_continue',
        'field_selector', 'label_selector', 'limit', 'resource_version',
        'timeout_seconds', 'watch',
        'async_req', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]

    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_collection_namespaced_lease" % key
            )
        params[key] = val
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('namespace') is None:
        raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_lease`")

    path_params = {'namespace': params['namespace']}

    # Query parameters, in the order the generated client emits them.
    query_params = [
        (api_name, params[py_name])
        for api_name, py_name in (
            ('includeUninitialized', 'include_uninitialized'),
            ('pretty', 'pretty'),
            ('continue', '_continue'),
            ('fieldSelector', 'field_selector'),
            ('labelSelector', 'label_selector'),
            ('limit', 'limit'),
            ('resourceVersion', 'resource_version'),
            ('timeoutSeconds', 'timeout_seconds'),
            ('watch', 'watch'),
        )
        if py_name in params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    # A collection DELETE carries no request body.
    return self.api_client.call_api(
        '/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1Status',
        auth_settings=['BearerToken'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_namespaced_lease(self, name, namespace, **kwargs):
    """delete a Lease

    Convenience wrapper around `delete_namespaced_lease_with_http_info`
    that returns only the deserialized response body.  Pass
    ``async_req=True`` to receive the request thread instead.

    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param V1DeleteOptions body:
    :param str dry_run: When present, modifications are not persisted ('All' processes every dry run stage).
    :param int grace_period_seconds: seconds before the object should be deleted; zero means delete immediately.
    :param bool orphan_dependents: deprecated; whether dependent objects should be orphaned.
    :param str propagation_policy: how garbage collection is performed ('Orphan', 'Background', or 'Foreground').
    :return: V1Status
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already honours async_req, so a single
    # delegation covers both the synchronous and asynchronous paths.
    return self.delete_namespaced_lease_with_http_info(name, namespace, **kwargs)
def delete_namespaced_lease_with_http_info(self, name, namespace, **kwargs):
"""
delete a Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_lease_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Lease (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param V1DeleteOptions body:
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'pretty', 'body', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_lease" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_lease`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_lease`")
collection_formats = {}
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'dry_run' in params:
query_params.append(('dryRun', params['dry_run']))
if 'grace_period_seconds' in params:
query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
if 'orphan_dependents' in params:
query_params.append(('orphanDependents', params['orphan_dependents']))
if 'propagation_policy' in params:
query_params.append(('propagationPolicy', params['propagation_policy']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_resources_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/coordination.k8s.io/v1beta1/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_lease_for_all_namespaces(self, **kwargs):
"""
list or watch objects of kind Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_lease_for_all_namespaces(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1LeaseList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_lease_for_all_namespaces_with_http_info(**kwargs)
else:
(data) = self.list_lease_for_all_namespaces_with_http_info(**kwargs)
return data
def list_lease_for_all_namespaces_with_http_info(self, **kwargs):
"""
list or watch objects of kind Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_lease_for_all_namespaces_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str pretty: If 'true', then the output is pretty printed.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1LeaseList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_lease_for_all_namespaces" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/coordination.k8s.io/v1beta1/leases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1LeaseList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_namespaced_lease(self, namespace, **kwargs):
"""
list or watch objects of kind Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_lease(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1LeaseList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_namespaced_lease_with_http_info(namespace, **kwargs)
else:
(data) = self.list_namespaced_lease_with_http_info(namespace, **kwargs)
return data
def list_namespaced_lease_with_http_info(self, namespace, **kwargs):
"""
list or watch objects of kind Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_namespaced_lease_with_http_info(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1beta1LeaseList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_namespaced_lease" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_lease`")
collection_formats = {}
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = []
if 'include_uninitialized' in params:
query_params.append(('includeUninitialized', params['include_uninitialized']))
if 'pretty' in params:
query_params.append(('pretty', params['pretty']))
if '_continue' in params:
query_params.append(('continue', params['_continue']))
if 'field_selector' in params:
query_params.append(('fieldSelector', params['field_selector']))
if 'label_selector' in params:
query_params.append(('labelSelector', params['label_selector']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'resource_version' in params:
query_params.append(('resourceVersion', params['resource_version']))
if 'timeout_seconds' in params:
query_params.append(('timeoutSeconds', params['timeout_seconds']))
if 'watch' in params:
query_params.append(('watch', params['watch']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api('/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1beta1LeaseList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_namespaced_lease(self, name, namespace, body, **kwargs):
"""
partially update the specified Lease
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_namespaced_lease(name, namespace, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Lease (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1beta1Lease
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_namespaced_lease_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.patch_namespaced_lease_with_http_info(name, namespace, body, **kwargs)
return data
def patch_namespaced_lease_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified Lease
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_namespaced_lease_with_http_info(name, namespace, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1beta1Lease
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented API parameters plus the bookkeeping kwargs understood by
    # api_client.call_api.
    all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout']

    # Collect parameters explicitly instead of via the fragile locals() /
    # six.iteritems() introspection; dict.items() is valid on both
    # Python 2 and 3, and `self`/`all_params` no longer leak into params.
    params = {'name': name, 'namespace': namespace, 'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_lease" % key
            )
        params[key] = val

    # Verify all required parameters were supplied and are not None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`patch_namespaced_lease`" % required)

    collection_formats = {}

    path_params = {'name': params['name'], 'namespace': params['namespace']}

    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    if 'dry_run' in params:
        # Query string uses the camelCase name expected by the API server.
        query_params.append(('dryRun', params['dry_run']))

    header_params = {}
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml',
         'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`: PATCH bodies must declare the patch
    # strategy (JSON patch, merge patch or strategic merge patch).
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json-patch+json', 'application/merge-patch+json',
         'application/strategic-merge-patch+json'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(
        '/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases/{name}',
        'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1Lease',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def read_namespaced_lease(self, name, namespace, **kwargs):
    """
    read the specified Lease
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_lease(name, namespace, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1Lease
             If the method is called asynchronously,
             returns the request thread.
    """
    # Strip the (data, status, headers) tuple down to just the data or,
    # when async_req is set, the request thread.
    kwargs['_return_http_data_only'] = True
    worker = self.read_namespaced_lease_with_http_info
    # Sync and async callers both receive the worker's result unchanged.
    return worker(name, namespace, **kwargs)
def read_namespaced_lease_with_http_info(self, name, namespace, **kwargs):
    """
    read the specified Lease
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_lease_with_http_info(name, namespace, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1Lease
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented API parameters plus the bookkeeping kwargs understood by
    # api_client.call_api.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout']

    # Collect parameters explicitly instead of via the fragile locals() /
    # six.iteritems() introspection; dict.items() is valid on both
    # Python 2 and 3.
    params = {'name': name, 'namespace': namespace}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_lease" % key
            )
        params[key] = val

    # Verify all required parameters were supplied and are not None.
    for required in ('name', 'namespace'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`read_namespaced_lease`" % required)

    collection_formats = {}

    path_params = {'name': params['name'], 'namespace': params['namespace']}

    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    if 'exact' in params:
        query_params.append(('exact', params['exact']))
    if 'export' in params:
        query_params.append(('export', params['export']))

    header_params = {}
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml',
         'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`: GET carries no request body, so any
    # content type is acceptable.
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(
        '/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases/{name}',
        'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1Lease',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def replace_namespaced_lease(self, name, namespace, body, **kwargs):
    """
    replace the specified Lease
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_lease(name, namespace, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1Lease body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1beta1Lease
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request only the deserialized object (or the request thread when
    # async_req is set) instead of the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    outcome = self.replace_namespaced_lease_with_http_info(
        name, namespace, body, **kwargs)
    # Whether outcome is the data or the async request thread, the caller
    # gets it unchanged.
    return outcome
def replace_namespaced_lease_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace the specified Lease
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_lease_with_http_info(name, namespace, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: name of the Lease (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1Lease body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1beta1Lease
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented API parameters plus the bookkeeping kwargs understood by
    # api_client.call_api.
    all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout']

    # Collect parameters explicitly instead of via the fragile locals() /
    # six.iteritems() introspection; dict.items() is valid on both
    # Python 2 and 3.
    params = {'name': name, 'namespace': namespace, 'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_lease" % key
            )
        params[key] = val

    # Verify all required parameters were supplied and are not None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_lease`" % required)

    collection_formats = {}

    path_params = {'name': params['name'], 'namespace': params['namespace']}

    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    if 'dry_run' in params:
        # Query string uses the camelCase name expected by the API server.
        query_params.append(('dryRun', params['dry_run']))

    header_params = {}
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml',
         'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api(
        '/apis/coordination.k8s.io/v1beta1/namespaces/{namespace}/leases/{name}',
        'PUT',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1Lease',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 67.653117 | 1,390 | 0.65975 |
58dca6fccff7907b7e38ed30dcf8e3df1f7fcfd7 | 11,075 | py | Python | tensorflow/tools/pip_package/setup.py | ouakif/tensorflow | 63c45aacf30e819b00e74b85bd1c9f11b0760cd3 | [
"Apache-2.0"
] | 3 | 2020-05-28T17:04:59.000Z | 2021-10-20T12:47:18.000Z | tensorflow/tools/pip_package/setup.py | top-on/tensorflow | 6efce9a74d4ba2ba2182d92ac1e4f144b5d755d2 | [
"Apache-2.0"
] | 3 | 2019-07-25T16:55:56.000Z | 2019-08-01T23:44:31.000Z | tensorflow/tools/pip_package/setup.py | top-on/tensorflow | 6efce9a74d4ba2ba2182d92ac1e4f144b5d755d2 | [
"Apache-2.0"
] | 3 | 2019-06-20T07:48:58.000Z | 2021-05-29T12:25:11.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow is an open source machine learning framework for everyone.
TensorFlow is an open source software library for high performance numerical
computation. Its flexible architecture allows easy deployment of computation
across a variety of platforms (CPUs, GPUs, TPUs), and from desktops to clusters
of servers to mobile and edge devices.
Originally developed by researchers and engineers from the Google Brain team
within Google's AI organization, it comes with strong support for machine
learning and deep learning and the flexible numerical computation core is used
across many other scientific domains.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import re
import sys
from setuptools import Command
from setuptools import find_packages
from setuptools import setup
from setuptools.command.install import install as InstallCommandBase
from setuptools.dist import Distribution
DOCLINES = __doc__.split('\n')
# This version string is semver compatible, but incompatible with pip.
# For pip, we will remove all '-' characters from this string, and use the
# result for pip.
# Also update tensorflow/tensorflow.bzl and
# tensorflow/core/public/version.h
_VERSION = '2.0.0'
REQUIRED_PACKAGES = [
'absl-py >= 0.7.0',
'astor >= 0.6.0',
'backports.weakref >= 1.0rc1;python_version<"3.4"',
'enum34 >= 1.1.6;python_version<"3.4"',
'gast == 0.2.2',
'google_pasta >= 0.1.6',
'keras_applications >= 1.0.8',
'keras_preprocessing >= 1.1.0',
'numpy >= 1.16.0, < 2.0',
'opt_einsum >= 2.3.2',
'protobuf >= 3.8.0',
'tensorboard >= 2.0.0, < 2.1.0',
'tensorflow_estimator >= 2.0.0, < 2.1.0',
'termcolor >= 1.1.0',
'wrapt >= 1.11.1',
# python3 requires wheel 0.26
'wheel >= 0.26;python_version>="3"',
'wheel;python_version<"3"',
# mock comes with unittest.mock for python3, need to install for python2
'mock >= 2.0.0;python_version<"3"',
# functools comes with python3, need to install the backport for python2
'functools32 >= 3.2.3;python_version<"3"',
'six >= 1.12.0',
]
if sys.byteorder == 'little':
# grpcio does not build correctly on big-endian machines due to lack of
# BoringSSL support.
# See https://github.com/tensorflow/tensorflow/issues/17882.
REQUIRED_PACKAGES.append('grpcio >= 1.8.6')
project_name = 'tensorflow'
if '--project_name' in sys.argv:
project_name_idx = sys.argv.index('--project_name')
project_name = sys.argv[project_name_idx + 1]
sys.argv.remove('--project_name')
sys.argv.pop(project_name_idx)
# tf-nightly should depend on tb-nightly
if 'tf_nightly' in project_name:
for i, pkg in enumerate(REQUIRED_PACKAGES):
if 'tensorboard' in pkg:
REQUIRED_PACKAGES[i] = 'tb-nightly >= 2.1.0a0, < 2.2.0a0'
elif 'tensorflow_estimator' in pkg and '2.0' in project_name:
REQUIRED_PACKAGES[i] = 'tensorflow-estimator-2.0-preview'
elif 'tensorflow_estimator' in pkg:
REQUIRED_PACKAGES[i] = 'tf-estimator-nightly'
# pylint: disable=line-too-long
CONSOLE_SCRIPTS = [
'toco_from_protos = tensorflow.lite.toco.python.toco_from_protos:main',
'tflite_convert = tensorflow.lite.python.tflite_convert:main',
'toco = tensorflow.lite.python.tflite_convert:main',
'saved_model_cli = tensorflow.python.tools.saved_model_cli:main',
# We need to keep the TensorBoard command, even though the console script
# is now declared by the tensorboard pip package. If we remove the
# TensorBoard command, pip will inappropriately remove it during install,
# even though the command is not removed, just moved to a different wheel.
'tensorboard = tensorboard.main:run_main',
'tf_upgrade_v2 = tensorflow.tools.compatibility.tf_upgrade_v2_main:main',
'estimator_ckpt_converter = tensorflow_estimator.python.estimator.tools.checkpoint_converter:main',
]
# pylint: enable=line-too-long
# Only keep freeze_graph console script in 1.X.
if _VERSION.startswith('1.') and '_2.0' not in project_name:
CONSOLE_SCRIPTS.append(
'freeze_graph = tensorflow.python.tools.freeze_graph:run_main')
# remove the tensorboard console script if building tf_nightly
if 'tf_nightly' in project_name:
CONSOLE_SCRIPTS.remove('tensorboard = tensorboard.main:run_main')
TEST_PACKAGES = [
'scipy >= 0.15.1',
]
class BinaryDistribution(Distribution):
  """Distribution that always reports bundled native extension modules.

  Marking the distribution as containing extension modules forces the
  generated wheel to be tagged platform-specific (non-pure), which is
  required because the package ships a compiled shared library.
  """

  def has_ext_modules(self):
    # Always true: the wheel bundles the native _pywrap extension even
    # though setup() declares no ext_modules explicitly.
    return True
class InstallCommand(InstallCommandBase):
  """Override the dir where the headers go."""

  def finalize_options(self):
    # Let the stock setuptools install command compute all standard paths
    # first, then redirect the ones we care about.
    ret = InstallCommandBase.finalize_options(self)
    # Install C/C++ headers inside the package itself so that
    # -I<site-packages>/tensorflow_core/include resolves them.
    self.install_headers = os.path.join(self.install_purelib, 'tensorflow_core',
                                        'include')
    # Force everything into the platform-specific lib directory, since the
    # package contains a compiled native extension.
    self.install_lib = self.install_platlib
    return ret
class InstallHeaders(Command):
  """Override how headers are copied.
  The install_headers that comes with setuptools copies all files to
  the same directory. But we need the files to be in a specific directory
  hierarchy for -I <include_dir> to work correctly.
  """
  description = 'install C/C++ header files'
  # Standard distutils option table: (long name, short name, help text).
  user_options = [('install-dir=', 'd',
                   'directory to install header files to'),
                  ('force', 'f',
                   'force installation (overwrite existing files)'),
                 ]
  boolean_options = ['force']

  def initialize_options(self):
    # Required distutils hook: set defaults before option parsing.
    self.install_dir = None
    self.force = 0
    self.outfiles = []

  def finalize_options(self):
    # Inherit install_dir/force from the parent 'install' command if the
    # user did not set them explicitly.
    self.set_undefined_options('install',
                               ('install_headers', 'install_dir'),
                               ('force', 'force'))

  def mkdir_and_copy_file(self, header):
    """Copy one header, preserving its relative directory under install_dir."""
    install_dir = os.path.join(self.install_dir, os.path.dirname(header))
    # Get rid of some extra intervening directories so we can have fewer
    # directories for -I
    install_dir = re.sub('/google/protobuf_archive/src', '', install_dir)
    install_dir = re.sub('/include/tensorflow_core/', '/include/tensorflow/',
                         install_dir)
    # Copy external code headers into tensorflow_core/include.
    # A symlink would do, but the wheel file that gets created ignores
    # symlink within the directory hierarchy.
    # NOTE(keveman): Figure out how to customize bdist_wheel package so
    # we can do the symlink.
    external_header_locations = [
        'tensorflow_core/include/external/eigen_archive/',
        'tensorflow_core/include/external/com_google_absl/',
    ]
    for location in external_header_locations:
      if location in install_dir:
        # Also flatten a copy next to the external/ subtree so both
        # include paths work.
        extra_dir = install_dir.replace(location, '')
        if not os.path.exists(extra_dir):
          self.mkpath(extra_dir)
        self.copy_file(header, extra_dir)
    if not os.path.exists(install_dir):
      self.mkpath(install_dir)
    return self.copy_file(header, install_dir)

  def run(self):
    # Copy every header declared on the distribution, recording the
    # destination paths for get_outputs().
    hdrs = self.distribution.headers
    if not hdrs:
      return
    self.mkpath(self.install_dir)
    for header in hdrs:
      (out, _) = self.mkdir_and_copy_file(header)
      self.outfiles.append(out)

  def get_inputs(self):
    return self.distribution.headers or []

  def get_outputs(self):
    return self.outfiles
def find_files(pattern, root):
  """Yield the path of every file under *root* whose basename matches *pattern*.

  *pattern* is an fnmatch-style glob (e.g. '*.h'); the walk is recursive.
  """
  for dirpath, _dirnames, filenames in os.walk(root):
    matched = fnmatch.filter(filenames, pattern)
    for filename in matched:
      yield os.path.join(dirpath, filename)
so_lib_paths = [
i for i in os.listdir('.')
if os.path.isdir(i) and fnmatch.fnmatch(i, '_solib_*')
]
matches = []
for path in so_lib_paths:
matches.extend(
['../' + x for x in find_files('*', path) if '.py' not in x]
)
if os.name == 'nt':
EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.pyd'
else:
EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.so'
headers = (
list(find_files('*.h', 'tensorflow_core/core')) +
list(find_files('*.h', 'tensorflow_core/stream_executor')) +
list(find_files('*.h', 'google/com_google_protobuf/src')) +
list(find_files('*.inc', 'google/com_google_protobuf/src')) +
list(find_files('*', 'third_party/eigen3')) + list(
find_files('*.h', 'tensorflow_core/include/external/com_google_absl')) +
list(
find_files('*.inc', 'tensorflow_core/include/external/com_google_absl'))
+ list(find_files('*', 'tensorflow_core/include/external/eigen_archive')))
setup(
name=project_name,
version=_VERSION.replace('-', ''),
description=DOCLINES[0],
long_description='\n'.join(DOCLINES[2:]),
url='https://www.tensorflow.org/',
download_url='https://github.com/tensorflow/tensorflow/tags',
author='Google Inc.',
author_email='packages@tensorflow.org',
# Contained modules and scripts.
packages=find_packages(),
entry_points={
'console_scripts': CONSOLE_SCRIPTS,
},
headers=headers,
install_requires=REQUIRED_PACKAGES,
tests_require=REQUIRED_PACKAGES + TEST_PACKAGES,
# Add in any packaged data.
include_package_data=True,
package_data={
'tensorflow': [
EXTENSION_NAME,
] + matches,
},
zip_safe=False,
distclass=BinaryDistribution,
cmdclass={
'install_headers': InstallHeaders,
'install': InstallCommand,
},
# PyPI package information.
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
keywords='tensorflow tensor machine learning',
)
| 35.957792 | 103 | 0.685147 |
0ca0258f2dfa763a10162001311e53e9f6caa4ee | 3,682 | py | Python | toontown/suit/SuitGlobals.py | MasterLoopyBM/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 1 | 2020-02-07T18:15:12.000Z | 2020-02-07T18:15:12.000Z | toontown/suit/SuitGlobals.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | null | null | null | toontown/suit/SuitGlobals.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 2 | 2020-11-08T03:38:35.000Z | 2021-09-02T07:03:47.000Z | # SuitGlobals are used to set the appearance of Cogs.
from toontown.suit import SuitDNA
from pandac.PandaModules import VBase4
# Indices into each suitProperties tuple below.
SCALE_INDEX = 0 # The scale of the cog
HAND_COLOR_INDEX = 1 # The hand color
HEADS_INDEX = 2 # A list of heads
HEAD_TEXTURE_INDEX = 3 # The texture to use for the head
HEIGHT_INDEX = 4 # The height of the cog
# Base heights of the three shared body rigs; per-suit scales below are
# expressed relative to these.
aSize = 6.06 # Size of body type 'a'
bSize = 5.29 # Size of body type 'b'
cSize = 4.14 # Size of body type 'c'
ColdCallerHead = VBase4(0.25, 0.35, 1.0, 1.0) # Head used by Cold Caller
# Per-suit appearance table keyed by suit code. Each value is a tuple of
# (scale, hand color, head model names, head texture override, height);
# an empty texture string means the head model's default texture is used.
# Bossbots
suitProperties = {'f': (4.0 / cSize, SuitDNA.corpPolyColor, ['flunky', 'glasses'], '', 4.88),
                  'p': (3.35 / bSize, SuitDNA.corpPolyColor, ['pencilpusher'], '', 5.0),
                  'ym': (4.125 / aSize, SuitDNA.corpPolyColor, ['yesman'], '', 5.28),
                  'mm': (2.5 / cSize, SuitDNA.corpPolyColor, ['micromanager'], '', 3.25),
                  'ds': (4.5 / bSize, SuitDNA.corpPolyColor, ['beancounter'], '', 6.08),
                  'hh': (6.5 / aSize, SuitDNA.corpPolyColor, ['headhunter'], '', 7.45),
                  'cr': (6.75 / cSize, VBase4(0.85, 0.55, 0.55, 1.0), ['flunky'], 'corporate-raider.jpg', 8.23),
                  'tbc': (7.0 / aSize, VBase4(0.75, 0.95, 0.75, 1.0), ['bigcheese'], '', 9.34),
                  # Lawbots
                  'bf': (4.0 / cSize, SuitDNA.legalPolyColor, ['tightwad'], 'bottom-feeder.jpg', 4.81),
                  'b': (4.375 / bSize, VBase4(0.95, 0.95, 1.0, 1.0), ['movershaker'], 'blood-sucker.jpg', 6.17),
                  'dt': (4.25 / aSize, SuitDNA.legalPolyColor, ['twoface'], 'double-talker.jpg', 5.63),
                  'ac': (4.35 / bSize, SuitDNA.legalPolyColor, ['ambulancechaser'], '', 6.39),
                  'bs': (4.5 / aSize, SuitDNA.legalPolyColor, ['backstabber'], '', 6.71),
                  'sd': (5.65 / bSize, VBase4(0.5, 0.8, 0.75, 1.0), ['telemarketer'], 'spin-doctor.jpg', 7.9),
                  'le': (7.125 / aSize, VBase4(0.25, 0.25, 0.5, 1.0), ['legaleagle'], '', 8.27),
                  'bw': (7.0 / aSize, SuitDNA.legalPolyColor, ['bigwig'], '', 8.69),
                  # Cashbots
                  'sc': (3.6 / cSize, SuitDNA.moneyPolyColor, ['coldcaller'], '', 4.77),
                  'pp': (3.55 / aSize, VBase4(1.0, 0.5, 0.6, 1.0), ['pennypincher'], '', 5.26),
                  'tw': (4.5 / cSize, SuitDNA.moneyPolyColor, ['tightwad'], '', 5.41),
                  'bc': (4.4 / bSize, SuitDNA.moneyPolyColor, ['beancounter'], '', 5.95),
                  'nc': (5.25 / aSize, SuitDNA.moneyPolyColor, ['numbercruncher'], '', 7.22),
                  'mb': (5.3 / cSize, SuitDNA.moneyPolyColor, ['moneybags'], '', 6.97),
                  'ls': (6.5 / bSize, VBase4(0.5, 0.85, 0.75, 1.0), ['loanshark'], '', 8.58),
                  'rb': (7.0 / aSize, SuitDNA.moneyPolyColor, ['yesman'], 'robber-baron.jpg', 8.95),
                  # Sellbots
                  'cc': (3.5 / cSize, VBase4(0.55, 0.65, 1.0, 1.0), ['coldcaller'], '', 4.63),
                  'tm': (3.75 / bSize, SuitDNA.salesPolyColor, ['telemarketer'], '', 5.24),
                  'nd': (4.35 / aSize, SuitDNA.salesPolyColor, ['numbercruncher'], 'name-dropper.jpg', 5.98),
                  'gh': (4.75 / cSize, SuitDNA.salesPolyColor, ['gladhander'], '', 6.4),
                  'ms': (4.75 / bSize, SuitDNA.salesPolyColor, ['movershaker'], '', 6.7),
                  'tf': (5.25 / aSize, SuitDNA.salesPolyColor, ['twoface'], '', 6.95),
                  'm': (5.75 / aSize, SuitDNA.salesPolyColor, ['twoface'], 'mingler.jpg', 7.61),
                  'mh': (7.0 / aSize, SuitDNA.salesPolyColor, ['yesman'], '', 8.95),
                  }
5ba8e4b601c80a6cf857f8ee7a46f4a89888e4fc | 8,682 | py | Python | src/pruning/utils.py | MohammedAljahdali/shrinkbench | f08a0e27d7e1118a46605e5ec9026ecaa931365e | [
"MIT"
] | null | null | null | src/pruning/utils.py | MohammedAljahdali/shrinkbench | f08a0e27d7e1118a46605e5ec9026ecaa931365e | [
"MIT"
] | null | null | null | src/pruning/utils.py | MohammedAljahdali/shrinkbench | f08a0e27d7e1118a46605e5ec9026ecaa931365e | [
"MIT"
] | null | null | null | """Auxiliary utils for implementing pruning strategies
"""
from collections import OrderedDict, defaultdict
import numpy
import torch
from torch import nn
from detectron2.structures.image_list import ImageList
from detectron2.structures.instances import Instances
def hook_applyfn(hook, model, forward=False, backward=False):
    """Build a registration function for use with ``model.apply``.

    Exactly one of *forward*/*backward* must be True; the returned
    ``register_hook`` attaches *hook* to every non-container submodule
    (skipping the root *model* itself) and appends each handle to the
    returned ``hooks`` list so callers can remove them later.

    Arguments:
        hook {callable} -- hook function to register
        model {torch.nn.Module} -- network whose submodules get the hook

    Keyword Arguments:
        forward {bool} -- register as a forward hook (default: {False})
        backward {bool} -- register as a backward hook (default: {False})

    Returns:
        (register_hook, hooks) -- apply-compatible function and the
        shared list that collects the registered hook handles
    """
    assert forward ^ backward, \
        "Either forward or backward must be True"
    hooks = []
    container_types = (nn.Sequential, nn.ModuleList, nn.ModuleDict)

    def register_hook(module):
        # Pure containers and the root model only group other modules;
        # hooking them would double-count activations.
        if isinstance(module, container_types) or module == model:
            return
        if forward:
            hooks.append(module.register_forward_hook(hook))
        if backward:
            hooks.append(module.register_backward_hook(hook))

    return register_hook, hooks
def get_params(model, recurse=False):
    """Returns dictionary of paramters
    Arguments:
        model {torch.nn.Module} -- Network to extract the parameters from
    Keyword Arguments:
        recurse {bool} -- Whether to recurse through children modules
    Returns:
        Dict(str:numpy.ndarray) -- Dictionary of named parameters their
        associated parameter arrays
    """
    extracted = {}
    # Detach from the autograd graph and snapshot each tensor on the CPU
    # so the returned arrays are independent of later training steps.
    for param_name, tensor in model.named_parameters(recurse=recurse):
        extracted[param_name] = tensor.detach().cpu().numpy().copy()
    return extracted
def get_activations(model, input):
    """Run *input* through *model* and capture every submodule's activations.

    Fixes a latent NameError in the previous version: the hook referenced
    ``input_data``/``output_data`` that were only assigned inside
    commented-out code, so any forward pass crashed on the first hook call.

    Arguments:
        model {torch.nn.Module} -- Network to profile
        input -- Input batch accepted by ``model(...)``

    Returns:
        OrderedDict -- module -> (input_array, output_array), where both
        entries are detached numpy copies. ReLU modules are skipped, and
        modules whose hook arguments are not plain tensors (e.g. detection
        models with structured outputs) are silently omitted.
    """
    activations = OrderedDict()

    def store_activations(module, input, output):
        if isinstance(module, nn.ReLU):
            # TODO ResNet18 implementation reuses a
            # single ReLU layer?
            return
        assert module not in activations, \
            f"{module} already in activations"
        # Forward hooks receive the module input as a tuple; unwrap the
        # first element. Most modules emit a single tensor output.
        input_data = input[0] if isinstance(input, tuple) else input
        output_data = output
        if not isinstance(input_data, torch.Tensor) or \
                not isinstance(output_data, torch.Tensor):
            # Structured (non-tensor) activations are not supported here;
            # skip rather than crash.
            return
        activations[module] = (input_data.detach().cpu().numpy().copy(),
                               output_data.detach().cpu().numpy().copy())

    fn, hooks = hook_applyfn(store_activations, model, forward=True)
    model.apply(fn)
    with torch.no_grad():
        model(input)

    # Always detach the hooks so the model is left unmodified.
    for h in hooks:
        h.remove()

    return activations
def get_gradients(model, inputs, outputs):
    """Placeholder for per-activation gradient extraction (not implemented).

    Intended to return the gradients of activations for *model* on the
    given batch; see the notes below for why a straightforward
    register_backward_hook implementation is insufficient.
    """
    # TODO implement using model.register_backward_hook()
    # So it is harder than it seems, the grad_input contains also the gradients
    # with respect to the weights and so far order seems to be (bias, input, weight)
    # which is confusing
    # Moreover, a lot of the time the output activation we are looking for is the
    # one after the ReLU and F.ReLU (or any functional call) will not be called by
    # the forward or backward hook
    # Discussion here
    # https://discuss.pytorch.org/t/how-to-register-hook-function-for-functional-form/25775
    # Best way seems to be monkey patching F.ReLU & other functional ops
    # That'll also help figuring out how to compute a module graph
    pass
def get_param_gradients(model, inputs, outputs, loss_func=None, by_module=True):
    """Compute parameter gradients for one forward/backward pass.

    Runs ``loss_func(model(inputs), outputs)``, backpropagates once, and
    snapshots every gradient as a detached numpy copy. The model's
    gradients are zeroed and its train/eval mode restored before returning.

    Arguments:
        model {torch.nn.Module} -- network to differentiate
        inputs -- input batch accepted by ``model(...)``
        outputs -- targets accepted by *loss_func*

    Keyword Arguments:
        loss_func -- loss callable; defaults to nn.CrossEntropyLoss()
        by_module {bool} -- if True, key results as module -> {param name:
            array}; otherwise as fully-qualified param name -> array

    Returns:
        Mapping of gradients as described above.
    """
    # NOTE(review): this initial OrderedDict is dead — both branches below
    # rebind `gradients` before use.
    gradients = OrderedDict()

    if loss_func is None:
        loss_func = nn.CrossEntropyLoss()

    # Remember train/eval mode so it can be restored at the end.
    training = model.training
    model.train()
    pred = model(inputs)
    loss = loss_func(pred, outputs)
    loss.backward()

    if by_module:
        gradients = defaultdict(OrderedDict)
        for module in model.modules():
            assert module not in gradients
            # recurse=False keeps each parameter under its owning module.
            for name, param in module.named_parameters(recurse=False):
                if param.requires_grad and param.grad is not None:
                    gradients[module][name] = param.grad.detach().cpu().numpy().copy()

    else:
        gradients = OrderedDict()
        for name, param in model.named_parameters():
            assert name not in gradients
            if param.requires_grad and param.grad is not None:
                gradients[name] = param.grad.detach().cpu().numpy().copy()

    # Leave the model as we found it: no accumulated grads, original mode.
    model.zero_grad()
    model.train(training)

    return gradients
def fraction_to_keep(compression, model, prunable_modules):
    """ Return fraction of params to keep to achieve desired compression ratio

    Compression = total / (fraction * prunable + (total - prunable))
    Solving for the keep fraction gives
        fraction = (total / compression - (total - prunable)) / prunable

    Arguments:
        compression {float} -- Desired overall compression ratio (>= 1)
        model {torch.nn.Module} -- Full model for which to compute the fraction
        prunable_modules {List(torch.nn.Module)} -- Modules that can be pruned in the model.
    Returns:
        {float} -- Fraction in (0, 1] of prunable parameters to keep
    """
    from src.metrics import model_size
    total_size, _ = model_size(model)
    prunable_size = sum(model_size(m)[0] for m in prunable_modules)
    nonprunable_size = total_size - prunable_size
    fraction = 1 / prunable_size * (total_size / compression - nonprunable_size)
    # If too many parameters are unprunable the target compression is unreachable.
    assert 0 < fraction <= 1, \
        f"Cannot compress to {1/compression} model with {nonprunable_size/total_size} " + \
        "fraction of unprunable parameters"
    return fraction
| 34.047059 | 99 | 0.558973 |
5ab3fde5f29e9b64b2dcd4064fee075d3d89a765 | 10,144 | py | Python | djangae/contrib/uniquetool/models.py | ikedaosushi/djangae | 5fd2f8d70699fbbf155740effe42a36b205a6540 | [
"BSD-3-Clause"
] | null | null | null | djangae/contrib/uniquetool/models.py | ikedaosushi/djangae | 5fd2f8d70699fbbf155740effe42a36b205a6540 | [
"BSD-3-Clause"
] | null | null | null | djangae/contrib/uniquetool/models.py | ikedaosushi/djangae | 5fd2f8d70699fbbf155740effe42a36b205a6540 | [
"BSD-3-Clause"
] | null | null | null | import datetime
import logging
from django.conf import settings
from django.apps import apps
from django.db import models, connections
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.utils import six
from google.appengine.api import datastore
from google.appengine.ext import deferred
from djangae.db import transaction
from djangae.fields import RelatedSetField
from djangae.contrib.mappers.pipes import MapReduceTask
from djangae.contrib.processing.mapreduce import map_entities
from djangae.contrib.processing.mapreduce.utils import qualname
from djangae.db.utils import django_instance_to_entities
from djangae.db.unique_utils import unique_identifiers_from_entity
from djangae.db.constraints import UniqueMarker
from djangae.db.caching import disable_cache
logger = logging.getLogger(__name__)
# Maintenance passes that can be run over a model's unique-constraint markers.
ACTION_TYPES = [
    ('check', 'Check'),  # Verify all the model's unique constraint markers exist and are assigned to it.
    ('repair', 'Repair'),  # Recreate any missing markers
    ('clean', 'Clean'),  # Remove any marker that isn't properly linked to an instance.
]
# Lifecycle states of a UniqueAction; new actions start as "running".
ACTION_STATUSES = [
    ('running', 'Running'),
    ('done', 'Done'),
]
# Issue codes recorded in ActionLog.log_type, with human-readable labels.
LOG_MSGS = [
    ('missing_marker', "Marker for the unique constraint is missing"),
    ('missing_instance', "Unique constraint marker exists, but doesn't point to the instance"),
    ('already_assigned', "Marker is assigned to a different instance already"),
    ('old_instance_key', "Marker was created when instance was a StringProperty")
]
# Cap on logs stored per action; further issues are silently dropped.
MAX_ERRORS = 100
def encode_model(model):
    """Serialize a model class to an "app_label,model_name" string."""
    meta = model._meta
    return "{},{}".format(meta.app_label, meta.model_name)
def decode_model(model_str):
    """Inverse of ``encode_model``: resolve an "app_label,model_name" string."""
    parts = model_str.split(',')
    return apps.get_model(*parts)
class ActionLog(models.Model):
    """A single issue discovered while running a :class:`UniqueAction`."""
    # Datastore key (stringified) of the affected model instance.
    instance_key = models.TextField()
    # Datastore key (stringified) of the related unique-constraint marker.
    marker_key = models.CharField(max_length=500)
    # One of the LOG_MSGS codes describing what went wrong.
    log_type = models.CharField(max_length=255, choices=LOG_MSGS)
    # The maintenance action this log entry belongs to.
    action = models.ForeignKey('UniqueAction')
class UniqueAction(models.Model):
    """A check/repair/clean pass over one model's unique-constraint markers.

    Saving a new instance triggers the corresponding mapreduce job (see the
    ``start_action`` post_save receiver below).
    """
    # One of ACTION_TYPES: 'check', 'repair' or 'clean'.
    action_type = models.CharField(choices=ACTION_TYPES, max_length=100)
    # Target model encoded as "app_label,model_name" (see encode_model).
    model = models.CharField(max_length=100)
    # Database alias the action runs against.
    db = models.CharField(max_length=100, default='default')
    # Lifecycle state; flipped to "done" by the mapreduce finalize callback.
    status = models.CharField(choices=ACTION_STATUSES, default=ACTION_STATUSES[0][0], editable=False, max_length=100)
    # Issues collected while the action ran (bounded by MAX_ERRORS).
    logs = RelatedSetField(ActionLog, editable=False)
def _log_action(action_id, log_type, instance_key, marker_key):
    """Create an ActionLog entry and attach it to its UniqueAction.

    The work runs inside a cross-group transaction so the log creation and
    the action update are applied atomically.  Once the action has collected
    more than MAX_ERRORS entries, further logs are silently dropped.
    """
    @transaction.atomic(xg=True)
    def _atomic(action_id, log_type, instance_key, marker_key):
        action = UniqueAction.objects.get(pk=action_id)
        if len(action.logs) > MAX_ERRORS:
            # Cap the number of stored issues to keep the entity bounded.
            return
        log = ActionLog.objects.create(
            action_id=action_id,
            log_type=log_type,
            instance_key=instance_key,
            marker_key=marker_key)
        action.logs.add(log)
        action.save()
    _atomic(action_id, log_type, instance_key, marker_key)
def log(action_id, log_type, instance_key, marker_key, defer=True):
    """Record an issue against a UniqueAction, optionally via the task queue.

    ``deferred.defer`` cannot take an inline or transaction-wrapped function
    directly, so the module-level helper ``_log_action`` (which wraps the
    transactional implementation) is what gets deferred.
    """
    if not defer:
        _log_action(action_id, log_type, instance_key, marker_key)
        return
    deferred.defer(_log_action, action_id, log_type, instance_key, marker_key)
@receiver(post_save, sender=UniqueAction)
def start_action(sender, instance, created, raw, **kwargs):
    """post_save hook that kicks off the mapreduce for a newly created action.

    Subsequent saves (e.g. when the status is flipped to "done") are ignored.
    """
    if not created:
        # The action is being updated, not started - nothing to launch.
        return
    # Use a fresh dict rather than rebinding the **kwargs parameter.
    mapper_kwargs = dict(
        action_pk=instance.pk,
    )
    if instance.action_type == "clean":
        mapper_kwargs.update(model=instance.model)
        CleanMapper(db=instance.db).start(**mapper_kwargs)
    else:
        # "check" only reports problems; "repair" also recreates markers.
        mapper_kwargs.update(repair=instance.action_type == "repair")
        CheckRepairMapper(model=decode_model(instance.model), db=instance.db).start(**mapper_kwargs)
def _finish(*args, **kwargs):
    """Mapreduce finalize callback: mark the UniqueAction as done."""
    action_pk = kwargs.get('action_pk')

    @transaction.atomic
    def _mark_done():
        action = UniqueAction.objects.get(pk=action_pk)
        action.status = "done"
        action.save()

    _mark_done()
class RawMapperMixin(object):
    """Mixin for mapreduce tasks that iterate raw datastore entities.

    Subclasses must define a static ``map`` method; an optional static
    ``finish`` method is used as the finalize callback.
    """

    def get_model_app_(self):
        # Raw mappers iterate a datastore kind directly, not a Django model.
        return None

    def start(self, *args, **kwargs):
        """Kick off the mapreduce over ``self.model``'s table or ``self.kind``.

        Raises:
            TypeError: if the subclass does not define a static ``map`` method.
        """
        if 'map' not in self.__class__.__dict__:
            # Named placeholder must be passed by keyword, otherwise .format()
            # raises KeyError instead of producing the intended message.
            raise TypeError('No static map method defined on class {cls}'.format(cls=self.__class__))
        if 'finish' in self.__class__.__dict__:
            finish = self.__class__.finish
        else:
            finish = None
        kwargs["db"] = self.db
        return map_entities(
            self.model._meta.db_table if self.model else self.kind,
            settings.DATABASES.get(self.db, {}).get('NAMESPACE', ''),
            ".".join([qualname(self.__class__), "run_map"]),
            finalize_func=".".join([qualname(self.__class__), "finish"]) if finish else None,
            _output_writer=self.output_writer_spec,
            _shards=self.shard_count,
            _job_name=self.job_name,
            _queue_name=kwargs.pop('queue_name', self.queue_name),
            *args,
            **kwargs
        )
class CheckRepairMapper(MapReduceTask):
    """Mapreduce task that verifies (and optionally repairs) unique markers.

    Maps over instances of the target model; for each instance it derives the
    unique-constraint markers the instance should own and checks that they
    exist and point back at the instance.
    """
    name = 'action_mapper'
    kind = '_djangae_unique_marker'
    def start(self, *args, **kwargs):
        # Propagate the database alias so map() queries the right namespace.
        kwargs['db'] = self.db
        return super(CheckRepairMapper, self).start(*args, **kwargs)
    @staticmethod
    def finish(*args, **kwargs):
        _finish(*args, **kwargs)
    @staticmethod
    def map(instance, *args, **kwargs):
        """ Figure out what markers the instance should use and verify they're attached to
        this instance. Log any weirdness and in repair mode - recreate missing markers. """
        action_id = kwargs.get("action_pk")
        repair = kwargs.get("repair")
        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE")
        assert alias == (instance._state.db or "default")
        # Build the raw datastore entity and the identifiers it should be unique on.
        entity, _ = django_instance_to_entities(connections[alias], instance._meta.fields, raw=True, instance=instance, check_null=False)
        identifiers = unique_identifiers_from_entity(type(instance), entity, ignore_pk=True)
        identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=namespace) for i in identifiers]
        # Batch-fetch all expected markers; missing ones come back as None.
        markers = datastore.Get(identifier_keys)
        instance_key = str(entity.key())
        markers_to_save = []
        for i, m in zip(identifier_keys, markers):
            marker_key = str(i)
            if m is None:
                # Missing marker
                if repair:
                    new_marker = datastore.Entity(UniqueMarker.kind(), name=i.name(), namespace=namespace)
                    new_marker['instance'] = entity.key()
                    new_marker['created'] = datetime.datetime.now()
                    markers_to_save.append(new_marker)
                else:
                    log(action_id, "missing_marker", instance_key, marker_key)
            elif 'instance' not in m or not m['instance']:
                # Marker with missing instance attribute
                if repair:
                    m['instance'] = entity.key()
                    markers_to_save.append(m)
                else:
                    log(action_id, "missing_instance", instance_key, marker_key)
            elif m['instance'] != entity.key():
                # Legacy markers stored the instance key as a string.
                if isinstance(m['instance'], six.string_types):
                    m['instance'] = datastore.Key(m['instance'])
                    if repair:
                        markers_to_save.append(m)
                    else:
                        log(action_id, "old_instance_key", instance_key, marker_key)
                if m['instance'] != entity.key():
                    # Marker already assigned to a different instance
                    log(action_id, "already_assigned", instance_key, marker_key)
                    # Also log in repair mode as repairing would break the other instance.
        if markers_to_save:
            datastore.Put(markers_to_save)
class CleanMapper(RawMapperMixin, MapReduceTask):
    """Mapreduce task that deletes orphaned or stale unique markers.

    Iterates every UniqueMarker entity and removes those whose instance no
    longer exists or whose identifier no longer matches the instance state.
    """
    name = 'action_clean_mapper'
    kind = '_djangae_unique_marker'
    @staticmethod
    def finish(*args, **kwargs):
        _finish(*args, **kwargs)
    @staticmethod
    def map(entity, model, *args, **kwargs):
        """ The Clean mapper maps over all UniqueMarker instances. """
        alias = kwargs.get("db", "default")
        namespace = settings.DATABASES.get(alias, {}).get("NAMESPACE", "")
        model = decode_model(model)
        # Marker key names are prefixed with "<table>|" - skip other models' markers.
        if not entity.key().id_or_name().startswith(model._meta.db_table + "|"):
            # Only include markers which are for this model
            return
        assert namespace == entity.namespace()
        with disable_cache():
            # At this point, the entity is a unique marker that is linked to an instance of 'model', now we should see if that instance exists!
            instance_id = entity["instance"].id_or_name()
            try:
                instance = model.objects.using(alias).get(pk=instance_id)
            except model.DoesNotExist:
                logger.info("Deleting unique marker %s because the associated instance no longer exists", entity.key().id_or_name())
                datastore.Delete(entity)
                return
            # Get the possible unique markers for the entity, if this one doesn't exist in that list then delete it
            instance_entity, _ = django_instance_to_entities(connections[alias], instance._meta.fields, raw=True, instance=instance, check_null=False)
            identifiers = unique_identifiers_from_entity(model, instance_entity, ignore_pk=True)
            identifier_keys = [datastore.Key.from_path(UniqueMarker.kind(), i, namespace=entity["instance"].namespace()) for i in identifiers]
            if entity.key() not in identifier_keys:
                logger.info("Deleting unique marker %s because the it no longer represents the associated instance state", entity.key().id_or_name())
                datastore.Delete(entity)
| 38.424242 | 150 | 0.656743 |
7998657608a89de39ecf0470b034917607e85daf | 2,826 | py | Python | src/bitmessagekivy/tests/test_trash_message.py | Bitmessage/PyBitmessage | 4421b4509caa49d0786b2e89ce8068170d877015 | [
"MIT",
"BSD-2-Clause-FreeBSD"
] | 1,583 | 2015-01-01T13:03:20.000Z | 2022-03-31T23:10:00.000Z | src/bitmessagekivy/tests/test_trash_message.py | Bitmessage/PyBitmessage | 4421b4509caa49d0786b2e89ce8068170d877015 | [
"MIT",
"BSD-2-Clause-FreeBSD"
] | 841 | 2015-01-01T14:51:48.000Z | 2022-03-25T06:45:14.000Z | src/bitmessagekivy/tests/test_trash_message.py | Bitmessage/PyBitmessage | 4421b4509caa49d0786b2e89ce8068170d877015 | [
"MIT",
"BSD-2-Clause-FreeBSD"
] | 482 | 2015-01-07T00:53:25.000Z | 2022-03-24T15:58:12.000Z | from .telenium_process import TeleniumTestProcess
from .common import skip_screen_checks
from .common import ordered
class TrashMessage(TeleniumTestProcess):
    """Trash Screen Functionality Testing"""
    @skip_screen_checks
    @ordered
    def test_delete_trash_message(self):
        """Delete Trash message permanently from trash message listing.

        Flow: open the navigation drawer, switch to the Trash screen, swipe a
        message to reveal its delete icon, delete it and confirm the dialog.
        """
        # Checking current Screen(Inbox screen)
        self.assert_wait_no_except('//ScreenManager[@current]', timeout=15, value='inbox')
        # Checking "Menu" is rendered
        self.assertExists('//MDActionTopAppBarButton[@icon=\"menu\"]', timeout=5)
        # this is for opening Nav drawer
        self.cli.wait_click('//MDActionTopAppBarButton[@icon=\"menu\"]', timeout=5)
        # checking state of Nav drawer
        self.assertExists("//MDNavigationDrawer[@state~=\"open\"]", timeout=5)
        # this is for opening Trash screen
        self.cli.wait_click('//NavigationItem[@text=\"Trash\"]', timeout=2)
        # Checking Trash Screen
        self.assertExists("//ScreenManager[@current=\"trash\"]", timeout=5)
        # This is for swiping message to activate delete icon.
        self.cli.wait_drag(
            '//Trash[0]//TwoLineAvatarIconListItem[0]/BoxLayout[1]',
            '//Trash[0]//TwoLineAvatarIconListItem[0]/BoxLayout[2]', 2, timeout=5)
        # Checking the "trash-can" is rendered
        self.assertExists(
            "//MDList[0]/CutsomSwipeToDeleteItem[0]//MDIconButton[@icon~=\"trash-can\"]", timeout=2)
        # Force-enable the delete icon so it can be clicked.
        self.cli.setattr('//MDList[0]/CutsomSwipeToDeleteItem[0]//MDIconButton', 'disabled', False)
        # Precondition: no confirmation dialog is open yet.
        self.assertNotExists('//MDDialog[@open]', timeout=5)
        # Checking the delete icon is rendered and functional
        self.assertExists('//MDList[0]/CutsomSwipeToDeleteItem[0]//MDIconButton[0]', timeout=5)
        # Click on the delete icon to delete the current message
        self.cli.wait_click('//MDList[0]/CutsomSwipeToDeleteItem[0]//MDIconButton[0]', timeout=5)
        # Checking Confirm Popup is Opened
        self.assertExists('//MDDialog[@open]', timeout=5)
        # Checking the popup's 'Yes' button is rendered.
        self.assertExists("//MDDialog//MDFlatButton[@text=\"Yes\"]", timeout=5)
        # Clicking on 'Yes' Button on Popup to confirm delete.
        self.cli.wait_click('//MDFlatButton[@text=\"Yes\"]', timeout=5)
        # Checking the Dialog is closed on click "Yes" button
        self.assertNotExists('//MDDialog[@open]', timeout=5)
        # Checking the message list item is rendered on Trash screen
        self.assertExists('//MDList[0]/CutsomSwipeToDeleteItem[0]', timeout=5)
        # Checking Current screen is Trash Screen
        self.assertExists("//ScreenManager[@current=\"trash\"]", timeout=5)
49ff16c4b485979ee35f59bc06e7ca34e0b7a947 | 1,604 | py | Python | Python/Buch_ATBS/Teil_2/Kapitel_13_Arbeiten_mit_Word_und_PDF_Dokumenten/06_pdf_seiten_mit_bestimmtem_textinhalt_finden/06_pdf_seiten_mit_bestimmtem_textinhalt_finden.py | Apop85/Scripts | e71e1c18539e67543e3509c424c7f2d6528da654 | [
"MIT"
] | null | null | null | Python/Buch_ATBS/Teil_2/Kapitel_13_Arbeiten_mit_Word_und_PDF_Dokumenten/06_pdf_seiten_mit_bestimmtem_textinhalt_finden/06_pdf_seiten_mit_bestimmtem_textinhalt_finden.py | Apop85/Scripts | e71e1c18539e67543e3509c424c7f2d6528da654 | [
"MIT"
] | 6 | 2020-12-24T15:15:09.000Z | 2022-01-13T01:58:35.000Z | Python/Buch_ATBS/Teil_2/Kapitel_13_Arbeiten_mit_Word_und_PDF_Dokumenten/06_pdf_seiten_mit_bestimmtem_textinhalt_finden/06_pdf_seiten_mit_bestimmtem_textinhalt_finden.py | Apop85/Scripts | 1d8dad316c55e1f1343526eac9e4b3d0909e4873 | [
"MIT"
] | null | null | null | # 06_pdf_seiten_mit_bestimmtem_textinhalt_finden.py
# This exercise script walks all PDF files below a subfolder, extracts the
# text of every page and, if a page contains the search string, copies it
# into a new results file.
import os, PyPDF2, re
os.chdir(os.path.dirname(__file__))
target_path='.\\search_me'
target_file='.\\results.pdf'
# Start from a clean slate: remove results from a previous run.
if os.path.exists(target_file):
    os.remove(target_file)
print('Bitte Suchbegriff eingeben:')
search_string=input()
# Find all PDF files in search_me and its subfolders.
file_list=[]
for dir_path, _dir_names, file_names in os.walk(target_path):
    for file_name in file_names:
        # Only keep real PDF files - other file types would crash PyPDF2.
        if file_name.lower().endswith('.pdf'):
            file_list.append(os.path.join(dir_path, file_name))
# Compile the case-insensitive, literal search pattern once instead of
# rebuilding it for every page; re.escape keeps user input literal.
search_pattern=re.compile(r'.?'+re.escape(search_string.lower())+r'.?')
# Open every PDF read-only and collect matching pages.
write_pdf=PyPDF2.PdfFileWriter()
counter=0
for file_name in file_list:
    print('öffne File: '+file_name)
    pdf_file_open=open(file_name, 'rb')
    pdf_content=PyPDF2.PdfFileReader(pdf_file_open)
    for page in range(pdf_content.numPages):
        current_page=pdf_content.getPage(page)
        extracted=current_page.extractText()
        if search_pattern.findall(extracted.lower()):
            write_pdf.addPage(current_page)
            counter+=1
# Write all matching pages into the result PDF.
target_file_open=open(target_file, 'wb')
write_pdf.write(target_file_open)
target_file_open.close()
print('Gefundene Einträge: '+str(counter))
52d0c6ae377038a2a14e3ab6ae3bfc9c2c72cd60 | 118 | py | Python | figcow/__main__.py | donno2048/figcow | 247608d4ee3ba4ed9064bf063940db8df712b325 | [
"MIT"
] | null | null | null | figcow/__main__.py | donno2048/figcow | 247608d4ee3ba4ed9064bf063940db8df712b325 | [
"MIT"
] | null | null | null | figcow/__main__.py | donno2048/figcow | 247608d4ee3ba4ed9064bf063940db8df712b325 | [
"MIT"
] | null | null | null | from . import cow
from sys import argv
def main():
    """Entry point: render the command-line arguments as a figcow message."""
    message = " ".join(argv[1:])
    print(cow(message))


if __name__ == "__main__":
    main()
508e3073d9f14ad9b2f078378ca993392b3938d6 | 134 | py | Python | proxies/__init__.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | null | null | null | proxies/__init__.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | 8 | 2021-04-10T17:55:31.000Z | 2021-04-19T14:45:14.000Z | proxies/__init__.py | ivelinahristova/webanalysis | ff73b65799dc1465b9138a8742ea74b9da171c8d | [
"MIT"
] | null | null | null | """
proxies
~~~~~~~~~~~~~~~~~~
This module contains the Proxies service used to extract proxy data
(user agents, IPs, etc.).
""" | 22.333333 | 62 | 0.544776 |
169c8c2393a919a3ae8c5434745fb9a9718c4669 | 67 | py | Python | text/src/autogluon/text/__init__.py | mseeger/autogluon-1 | e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0 | [
"Apache-2.0"
] | null | null | null | text/src/autogluon/text/__init__.py | mseeger/autogluon-1 | e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0 | [
"Apache-2.0"
] | null | null | null | text/src/autogluon/text/__init__.py | mseeger/autogluon-1 | e8d82363ce07fd8e3087bcdd2d71c6f6bd8fd7a0 | [
"Apache-2.0"
] | null | null | null | from .text_classification import *
from .text_prediction import *
| 16.75 | 34 | 0.80597 |
6364b09274c39e11bfe6951257f8713bcc837e32 | 204 | py | Python | backend/server/apps/games/admin.py | SharyZ/gaming-dashboard-v2 | e257371b651d67cd02615068ae05f0847c673a80 | [
"MIT"
] | null | null | null | backend/server/apps/games/admin.py | SharyZ/gaming-dashboard-v2 | e257371b651d67cd02615068ae05f0847c673a80 | [
"MIT"
] | null | null | null | backend/server/apps/games/admin.py | SharyZ/gaming-dashboard-v2 | e257371b651d67cd02615068ae05f0847c673a80 | [
"MIT"
] | null | null | null | from django.contrib import admin
from apps.games.models import Category
from apps.games.models import Game
# Expose the games catalogue models through Django's admin interface.
for _model in (Category, Game):
    admin.site.register(_model)
| 20.4 | 39 | 0.77451 |
a0b9d6380f31de0dc7f896c9332383121696aa4e | 1,747 | py | Python | coursescan_calibrate.py | cdoolin/labtools | 867a2726f5a707a8f5e698bf4a6bb4de40cfbe27 | [
"MIT"
] | 1 | 2021-06-07T16:51:09.000Z | 2021-06-07T16:51:09.000Z | coursescan_calibrate.py | cdoolin/labtools | 867a2726f5a707a8f5e698bf4a6bb4de40cfbe27 | [
"MIT"
] | null | null | null | coursescan_calibrate.py | cdoolin/labtools | 867a2726f5a707a8f5e698bf4a6bb4de40cfbe27 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#from numpy import *
#from matplotlib.pyplot import *
from time import sleep
from datetime import datetime
# use argparse module to read commandline options
import argparse
# Command-line options for the sweep; --start/--stop/--step are piezo
# offsets (units not stated here - presumably laser-controller units; TODO confirm).
pars = argparse.ArgumentParser(description="""
scans newfocus piezo using lasernet. takes data at piezo offsets.
""")
pars.add_argument("--start", default=765, type=float, help="piezo start")
pars.add_argument("--stop", default=781, type=float, help="piezo stop")
pars.add_argument("--step", default=0.5, type=float, help="piezo step")
pars.add_argument("--daq", help="channel to take nidaq data")
pars.add_argument("--laser", default="localhost", help="set lasernet address")
pars.add_argument("-w", "--wave", default=None, help="wavelength meter address")
args = pars.parse_args()
#
# lasernet client (through websockets)
#
import labdrivers.websocks
laser = labdrivers.websocks.LaserClient(args.laser)
#
# Wavelength Meter
#
import labdrivers.rvisa
wlm = labdrivers.rvisa.WlMeter(args.wave)
#
# DAQ
#
# 5000 samples at 100 Hz(?) with +/-10 V range - see labdrivers.daq.SimpleDaq.
import labdrivers.daq
daq = labdrivers.daq.SimpleDaq(args.daq, 5000, 100, maxv=10.)
def stepto(a, b, d):
    """Yield values from ``a`` to ``b`` (inclusive) in steps of size ``d``.

    The sign of ``d`` is adjusted automatically so the walk always heads
    toward ``b``; the final value yielded is exactly ``b``.
    """
    step = abs(d) if b > a else -abs(d)
    current = a
    yield current
    while abs(b - current) > abs(step):
        current += step
        yield current
    yield b
import numpy
# Wavelength set-points for the sweep, plus parallel output arrays for the
# wavelength-meter readings and the measured transmission.
wls = numpy.array(list(stepto(args.start, args.stop, args.step)))
wlms = numpy.empty_like(wls)
trans = numpy.empty_like(wls)
for i, wl in enumerate(wls):
    # Move the laser, then record the measured wavelength and mean DAQ signal.
    laser.set_wave(wl)
    wlms[i] = wlm.wl()
    trans[i] = numpy.mean(daq.read())
    print("%s %s %s" % (wl, wlms[i], trans[i]))
# NOTE(review): datetime was already imported at the top of the script;
# this re-import is redundant but harmless.
from datetime import datetime
fname = datetime.now().strftime("%y.%m.%d_%H%M%S") + "_courscan_calibrate.npy"
print("saving to %s" % fname)
# Save as an (N, 3) array of [set-point, measured wavelength, transmission].
numpy.save(fname, numpy.vstack((wls, wlms, trans)).T)
| 21.8375 | 80 | 0.692044 |
c946b88ddb79a585b9dabd3268c97246ad8b4c11 | 131 | py | Python | app/main/__init__.py | alphonce-otieno-odhiambo/BOLD-THOUGHTS | 50683af21a77682c7f4fb27eef72eb72189d4a82 | [
"MIT"
] | null | null | null | app/main/__init__.py | alphonce-otieno-odhiambo/BOLD-THOUGHTS | 50683af21a77682c7f4fb27eef72eb72189d4a82 | [
"MIT"
] | null | null | null | app/main/__init__.py | alphonce-otieno-odhiambo/BOLD-THOUGHTS | 50683af21a77682c7f4fb27eef72eb72189d4a82 | [
"MIT"
] | null | null | null | from flask import Blueprint
from werkzeug.utils import format_string
main = Blueprint('main', __name__)
from . import views, forms | 26.2 | 40 | 0.801527 |
ed1a9708f71dcac12d84ec6ea4e4a463809b90e7 | 9,059 | py | Python | tpDcc/libs/datalibrary/dccs/maya/core/mirrortable.py | tpDcc/tpDcc-libs-datalibrary | e9678f3d6f0b3a5e0ee3a16d2e8d56627cfb2d54 | [
"MIT"
] | null | null | null | tpDcc/libs/datalibrary/dccs/maya/core/mirrortable.py | tpDcc/tpDcc-libs-datalibrary | e9678f3d6f0b3a5e0ee3a16d2e8d56627cfb2d54 | [
"MIT"
] | null | null | null | tpDcc/libs/datalibrary/dccs/maya/core/mirrortable.py | tpDcc/tpDcc-libs-datalibrary | e9678f3d6f0b3a5e0ee3a16d2e8d56627cfb2d54 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains base mirror table data transfer object implementation for Maya
"""
from __future__ import print_function, division, absolute_import
import re
import logging
import maya.cmds
from tpDcc import dcc
from tpDcc.libs.python import python, decorators
from tpDcc.dccs.maya.core import animation as anim_utils, decorators as maya_decorators
from tpDcc.libs.datalibrary.core import consts, exceptions, mirrortable
from tpDcc.libs.datalibrary.dccs.maya.core import utils
logger = logging.getLogger(consts.LIB_ID)
class MayaMirrorTable(mirrortable.BaseMirrorTable):
    """Maya implementation of the mirror table data transfer object."""

    def __init__(self, *args, **kwargs):
        super(MayaMirrorTable, self).__init__(*args, **kwargs)

    # ============================================================================================================
    # OVERRIDES
    # ============================================================================================================

    @classmethod
    def find_side(cls, objects, regex_sides):
        """
        Returns the naming convention for the given object names
        :param objects: list(str)
        :param regex_sides: str or list(str)
        :return: str
        """
        if python.is_string(regex_sides):
            regex_sides = regex_sides.split('|')
        regex_sides = python.force_list(regex_sides)
        regex_sides = [re.compile(side) for side in regex_sides]
        for obj in objects:
            # Strip any DAG path and namespace before matching.
            obj = obj.split('|')[-1].split(':')[-1]
            for regex_side in regex_sides:
                match = regex_side.search(obj)
                if match:
                    side = match.group()
                    # Mark whether the side token is a prefix or a suffix.
                    if obj.startswith(side):
                        side += '*'
                    if obj.endswith(side):
                        side = '*' + side
                    return side
        return ''

    @staticmethod
    def replace_prefix(name, old, new):
        """
        Replaces the given old prefix with the given new one
        :param name: str
        :param old: str
        :param new: str
        :return: str
        """
        target_name = name
        old = old.replace('*', '')
        new = new.replace('*', '')
        # Support for namespaces
        if ':' in name:
            target_name = MayaMirrorTable._right_replace(name, ':' + old, ':' + new, 1)
            if name != target_name:
                return target_name
        # Support for prefix with long name
        if '|' in name:
            target_name = name.replace('|' + old, '|' + new)
        elif target_name.startswith(old):
            # BUGFIX: was "name.replacde(...)", which raised AttributeError.
            target_name = name.replace(old, new, 1)
        return target_name

    @staticmethod
    def replace_suffix(name, old, new):
        """
        Replaces the given old suffix with the given new one
        :param name: str
        :param old: str
        :param new: str
        :return: str
        """
        target_name = name
        old = old.replace('*', '')
        new = new.replace('*', '')
        # Support for suffix with long name
        if '|' in name:
            target_name = name.replace(old + '|', new + '|')
        # For example, test:footR
        if target_name.endswith(old):
            target_name = target_name[:-len(old)] + new
        return target_name

    def match_objects(self, objects=None, **kwargs):
        """Yield (source_name, target_name, mirror_axis) for matched nodes."""
        namespaces = kwargs.pop('namespaces', None)
        source_objects = list(self.objects().keys())
        matches = utils.match_names(source_objects, target_objects=objects, target_namespaces=namespaces)
        for source_node, target_node in matches:
            target_name = target_node.name()
            mirror_axis = self.mirror_axis(source_node.name())
            yield source_node.name(), target_name, mirror_axis

    def is_attribute_mirrored(self, attr, mirror_axis):
        """Return True if the given transform attribute flips under mirror_axis."""
        if mirror_axis == [-1, 1, 1]:
            if attr == "translateX" or attr == "rotateY" or attr == "rotateZ":
                return True
        elif mirror_axis == [1, -1, 1]:
            if attr == "translateY" or attr == "rotateX" or attr == "rotateZ":
                return True
        elif mirror_axis == [1, 1, -1]:
            if attr == "translateZ" or attr == "rotateX" or attr == "rotateY":
                return True
        elif mirror_axis == [-1, -1, -1]:
            if attr == "translateX" or attr == "translateY" or attr == "translateZ":
                return True
        return False

    @decorators.timestamp
    @maya_decorators.undo
    @maya_decorators.show_wait_cursor
    @maya_decorators.restore_selection
    def load(self, *args, **kwargs):
        """Apply the mirror table to the scene.

        Keyword arguments: objects, namespaces, option ('swap'/'left to
        right'/'right to left' or an int), keys_option and time (frame range).
        Raises NoMatchFoundError when nothing could be mirrored.
        """
        objects = kwargs.get('objects', None)
        namespaces = kwargs.get('namespaces', None)
        option = kwargs.get('option', None)
        keys_option = kwargs.get('keys_option', None)
        time = kwargs.get('time', None)
        # Normalize string options to their numeric MirrorOptions values.
        if option and not isinstance(option, int):
            if option.lower() == 'swap':
                option = 0
            elif option.lower() == 'left to right':
                option = 1
            elif option.lower() == 'right to left':
                option = 2
            else:
                raise ValueError('Invalid load option: {}'.format(option))
        self.validate(namespaces=namespaces)
        results = dict()
        animation = True
        found_object = False
        source_objects = list(self.objects().keys())
        if option is None:
            option = mirrortable.MirrorOptions.Swap
        if keys_option == mirrortable.KeysOptions.All:
            time = None
        elif keys_option == mirrortable.KeysOptions.SelectedRange:
            time = anim_utils.get_selected_frame_range()
        # check that given time is not a single frame
        if time and time[0] == time[1]:
            time = None
            animation = None
        matches = utils.match_names(source_objects=source_objects, target_objects=objects, target_namespaces=namespaces)
        for source_node, target_node in matches:
            target_object = target_node.name()
            target_object2 = self.mirror_object(target_object) or target_object
            # NOTE(review): membership is tested on the mirrored name but the
            # original name is used as key - looks asymmetric; kept as-is.
            if target_object2 not in results:
                results[target_object] = target_object2
            mirror_axis = self.mirror_axis(source_node.name())
            target_object_exists = dcc.node_exists(target_object)
            target_object2_exists = dcc.node_exists(target_object2)
            if target_object_exists and target_object2_exists:
                found_object = True
                if animation:
                    self.transfer_animation(
                        target_object, target_object2, mirror_axis=mirror_axis, option=option, time=time)
                else:
                    self.transfer_static(target_object, target_object2, mirror_axis=mirror_axis, option=option)
            else:
                if not target_object_exists:
                    logger.warning('Cannot find destination object {}'.format(target_object))
                if not target_object2_exists:
                    logger.warning('Cannot find mirrored destination object {}'.format(target_object2))
        dcc.focus_ui_panel('MayaWindow')
        if not found_object:
            # BUGFIX: message typo "wne" -> "when".
            raise exceptions.NoMatchFoundError('No objects match when loading mirror table data')

    def _transfer_animation(self, source_object, target_object, attrs=None, mirror_axis=None, time=None):
        """Copy keys from source to target, mirroring values along mirror_axis."""
        maya.cmds.cutKey(target_object, time=time or ())
        if maya.cmds.copyKey(source_object, time=time or ()):
            if not time:
                maya.cmds.pasteKey(target_object, option='replaceCompletely')
            else:
                maya.cmds.pasteKey(target_object, time=time, option='replace')
        if attrs is None:
            attrs = maya.cmds.listAttr(source_object, keyable=True) or list()
        for attr in attrs:
            source_attr = utils.Attribute(source_object, attr)
            target_attr = utils.Attribute(target_object, attr)
            if target_attr.exists():
                if target_attr.is_connected():
                    # Animated attribute: negate the curve if it mirrors.
                    if self.is_attribute_mirrored(attr, mirror_axis):
                        maya.cmds.scaleKey(target_attr.name(), valueScale=-1, attribute=attr)
                else:
                    # Static attribute: set the (possibly mirrored) value.
                    value = source_attr.value
                    self.set_attribute(target_object, attr, value, mirror_axis=mirror_axis)

    # ============================================================================================================
    # INTERNAL
    # ============================================================================================================

    @staticmethod
    def _right_replace(name, old, new, count=1):
        """
        Replace up to ``count`` occurrences of ``old`` in ``name``, scanning
        from the right-hand side.  Used by replace_prefix for namespaced names.
        :param name: str
        :param old: str
        :param new: str
        :param count: int
        :return: str
        """
        # BUGFIX: this was an unimplemented "pass" stub, so replace_prefix
        # returned None for any namespaced name.
        return new.join(name.rsplit(old, count))
| 36.676113 | 120 | 0.556242 |
e749573d3f72e34329e4365f90cb948132fb418c | 10,424 | py | Python | python/sklearn/linear-regression/workload-analysis/bench-gpu/post-process/roofline/roofline.py | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | 1 | 2020-01-09T03:22:09.000Z | 2020-01-09T03:22:09.000Z | python/sklearn/linear-regression/workload-analysis/bench-gpu/post-process/roofline/roofline.py | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | python/sklearn/linear-regression/workload-analysis/bench-gpu/post-process/roofline/roofline.py | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python3
from collections import OrderedDict
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns;
def extract_model_data(data_file_path, debug=True):
    """Parse a model benchmark dump into {net_name: [(batch, gflops, intensity)]}.

    The file holds two tab-separated lines per network: the first carries
    GFLOPS per batch size, the second the operational intensity.  Column 0 of
    each line is the network name; column ``j`` corresponds to batch size
    ``2**(j-1)``.
    """
    gflops_intensity_dict = {}
    with open(data_file_path, 'r') as reader:
        lines = reader.readlines()
        # Consume the lines pairwise: GFLOPS line, then intensity line.
        for idx in range(0, len(lines), 2):
            gflops_cols = lines[idx].rstrip('\n').split('\t')
            intensity_cols = lines[idx + 1].rstrip('\n').split('\t')
            entries = [
                (2 ** (col - 1), float(gflops_cols[col]), float(intensity_cols[col]))
                for col in range(1, len(gflops_cols))
            ]
            gflops_intensity_dict[gflops_cols[0]] = entries
        if debug:
            print(gflops_intensity_dict)
    return gflops_intensity_dict
def extract_op_data(data_file_path, debug=True):
    """Parse a per-operator dump into a list of (op_type, gflops, intensity).

    Each line has four tab-separated fields; the first is ignored.
    """
    op_data_list = []
    with open(data_file_path, 'r') as reader:
        for raw_line in reader:
            _, op_type, gflops, intensity = raw_line.rstrip('\n').split('\t')
            op_data_list.append((op_type, float(gflops), float(intensity)))
        if debug:
            print(op_data_list)
    return op_data_list
# Scan every network entry for the largest operational intensity and the
# smallest GFLOPS value; used to size the roofline plot.
def find_boundard_pairs(gflops_intensity_dict):
    max_intensity = -1
    min_flops = 1.79e+100
    for entries in gflops_intensity_dict.values():
        for _batch, gflops, intensity in entries:
            if intensity > max_intensity:
                max_intensity = intensity
            if gflops < min_flops:
                min_flops = gflops
    return max_intensity, min_flops
def draw_model_roofline(gflops_intensity_dict, peak_flops, peak_membdw):
    """Draw a roofline plot of whole-model throughput measurements.

    Parameters
    ----------
    gflops_intensity_dict : dict
        net name -> list of (batch_size, gflops, intensity) tuples,
        as returned by extract_model_data().
    peak_flops : float
        Hardware peak compute rate (GFLOPS).
    peak_membdw : float
        Hardware peak memory bandwidth (GB/s).
    """
    # One marker per known network; nets not listed here are skipped,
    # matching the behaviour of the old if/elif chain.
    net_markers = {
        'MobileNetV1': 'x',
        'SqueezeNet': 'v',
        'DenseNet121': '*',
        'ResNet50': 's',
        'SSD_MobileNetV1': 'd',
        'SSD_VGG16': 'p',
    }
    # set color palette for different dnns
    net_type_set = {k for k in gflops_intensity_dict}
    colors = sns.color_palette("hls", n_colors=len(net_type_set) + 2)
    net_color_map = {val: i for i, val in enumerate(list(net_type_set))}
    fig, ax = plt.subplots(figsize=(6, 6))
    # 1. scatter the <intensity, gflops> pairs
    for net_name, records in gflops_intensity_dict.items():
        marker = net_markers.get(net_name)
        if marker is None:
            continue
        for _batch_size, gflops, intensity in records:
            # linestyle='None' keeps these as pure scatter points, like the
            # old format-string forms ('x', 'v', ...).
            ax.plot(intensity, gflops, linestyle='None', marker=marker,
                    color=colors[net_color_map[net_name]], label=net_name)
    # 2. plot the roof line: flat compute roof meeting the sloped
    # memory-bandwidth roof at the ridge point x1.
    x1 = peak_flops / peak_membdw
    y1 = peak_flops
    max_op_intensity, min_flops = find_boundard_pairs(gflops_intensity_dict)
    print('max intensity:', max_op_intensity, 'min flops:', min_flops)
    if max_op_intensity < x1:
        # all points sit left of the ridge; extend the roof a little past it
        xmax = x1 + 5
    else:
        xmax = max_op_intensity + 10
    ax.hlines(y=y1, xmin=x1, xmax=xmax, linewidth=1.5, color='red')
    x2 = min_flops / peak_membdw
    y2 = peak_membdw * x2
    if x2 > x1:
        # clamp the bandwidth-roof endpoint just left of the ridge
        x2 = peak_flops / peak_membdw - 0.1
        # NOTE(review): this slope is x1 (= peak_flops/peak_membdw), not
        # peak_membdw, which looks inconsistent with the line above; it only
        # affects the diagnostic print below, so behaviour is preserved.
        y2 = (peak_flops / peak_membdw) * x2
    print('x1:', x1, ' y1:', y1, ' x2:', x2, ' y2:', y2)
    ax.plot([0.1, x1], [peak_membdw * 0.1, y1], linewidth=1.5, color='red')
    ax.set_yscale('log')
    ax.set_xscale('log')
    ax.set_ylabel('GFLOps/sec', fontsize=10)
    ax.set_xlabel('Operational Intensity (FLOps/Byte)', fontsize=10)
    # de-duplicate legend entries (each point was plotted with a label)
    handles, labels = ax.get_legend_handles_labels()
    labels_od = OrderedDict(zip(labels, handles))
    ax.legend(labels_od.values(), labels_od.keys(), loc='upper left')
    plt.show()
def draw_op_roofline(op_data_list, peak_flops, peak_membdw):
    """Draw a roofline plot of per-operator throughput measurements.

    Parameters
    ----------
    op_data_list : list
        (op_type, gflops, intensity) tuples from extract_op_data().
    peak_flops : float
        Hardware peak compute rate (GFLOPS).
    peak_membdw : float
        Hardware peak memory bandwidth (GB/s).
    """
    # Marker per operator type (both capitalised and lower-case spellings,
    # as in the original chain); unknown op types are skipped as before.
    op_markers = {
        'Convolution': 'x', 'convolution': 'x',
        'InnerProduct': 'v',
        'Pooling': '*', 'pooling': '*',
        'Scale': 's', 'scale': 's',
        'Eltwise': 'd', 'element-wise': 'd',
        'ReLU': 'p', 'relu': 'p',
        'BatchNorm': 'o', 'batchnorm': 'o',
        'Softmax': '+', 'softmax': '+',
        'LRN': '^', 'lrn': '^',
        'GEMV': '<', 'gemv': '<',
        'GEMM': 'P', 'gemm': 'P',
    }
    op_type_set = {record[0] for record in op_data_list}
    colors = sns.color_palette("hls", n_colors=len(op_type_set) + 2)
    layer_color_map = {val: i for i, val in enumerate(list(op_type_set))}
    fig, ax = plt.subplots(figsize=(6, 6))
    # 1. scatter the <intensity, flops> pairs, one marker style per op type
    for record in op_data_list:
        op_type, flops, intensity = str(record[0]), record[1], record[2]
        marker = op_markers.get(op_type)
        if marker is None:
            continue
        ax.plot(intensity, flops, linestyle='None', marker=marker,
                color=colors[layer_color_map[op_type]], label=op_type)
    # 2. plot the roof line: flat compute roof plus sloped bandwidth roof
    x1 = peak_flops / peak_membdw
    y1 = peak_flops
    max_op_intensity = max(rec[2] for rec in op_data_list)
    ax.hlines(y=y1, xmin=x1,
              xmax=max_op_intensity + 15, linewidth=1.5, color='red')
    min_flops = min(rec[1] for rec in op_data_list)
    x2 = min_flops / peak_membdw
    y2 = peak_membdw * x2
    ax.plot([x1, x2], [y1, y2], linewidth=1.5, color='red')
    ax.set_yscale('log')
    ax.set_xscale('log')
    ax.set_ylabel('GFLOps/sec', fontsize=10)
    ax.set_xlabel('Operational Intensity (FLOps/Byte)', fontsize=10)
    # de-duplicate legend entries (each point carried its own label)
    handles, labels = ax.get_legend_handles_labels()
    labels_od = OrderedDict(zip(labels, handles))
    ax.legend(labels_od.values(), labels_od.keys(), loc='upper left')
    plt.show()
if __name__ == '__main__':
    # Hardware peaks used for the roofs:
    # Titan Xp 12.15 TFLOPS, 547.7 GB/s
    # Tesla K40m 5.046 TFLOPS, 288.4 GB/s
    # NOTE(review): the input .txt files must exist in the working directory.
    titan_model_data = extract_model_data('titan_xp_model_throughput.txt')
    titan_peak_flops = 12.15*1000  # convert TFLOPS -> GFLOPS
    titan_peak_mem_bandwidth = 547.7  # GB/s
    draw_model_roofline(titan_model_data, titan_peak_flops, titan_peak_mem_bandwidth)
    k40m_model_data = extract_model_data('tesla_k40m_model_throughput.txt')
    k40m_peak_flops = 5.046*1000  # GFLOPS
    k40m_peak_mem_bandwidth = 288.4  # GB/s
    # NOTE(review): this disabled call passes the *Titan* peaks with K40m
    # data — presumably it should use k40m_peak_flops/k40m_peak_mem_bandwidth.
    #draw_model_roofline(k40m_model_data, titan_peak_flops, titan_peak_mem_bandwidth)
    titan_op_data = extract_op_data('titan_xp_op_throughput.txt')
    draw_op_roofline(titan_op_data, titan_peak_flops, titan_peak_mem_bandwidth)
    # K40m per-op plots are parsed but currently not drawn.
    k40m_op_data = extract_op_data('tesla_k40m_op_throughput.txt')
    #draw_op_roofline(k40m_op_data, k40m_peak_flops, k40m_peak_mem_bandwidth)
    tesla_k40m_mobilenet_op_data = extract_op_data('tesla_k40m_mobilenet_batch_op.txt')
    #draw_op_roofline(tesla_k40m_mobilenet_op_data, k40m_peak_flops, k40m_peak_mem_bandwidth)
    titan_xp_mobilenet_op_data = extract_op_data('titan_xp_mobilenet_batch_op.txt')
    draw_op_roofline(titan_xp_mobilenet_op_data, titan_peak_flops, titan_peak_mem_bandwidth)
| 42.897119 | 94 | 0.603799 |
c2bbe10404c1210fdf2eb272eeb77f1802c695aa | 4,859 | py | Python | Algo_Ds_Notes-master/Algo_Ds_Notes-master/Binary_Heap/BinaryHeap.py | rajatenzyme/Coding-Journey- | 65a0570153b7e3393d78352e78fb2111223049f3 | [
"MIT"
] | null | null | null | Algo_Ds_Notes-master/Algo_Ds_Notes-master/Binary_Heap/BinaryHeap.py | rajatenzyme/Coding-Journey- | 65a0570153b7e3393d78352e78fb2111223049f3 | [
"MIT"
] | null | null | null | Algo_Ds_Notes-master/Algo_Ds_Notes-master/Binary_Heap/BinaryHeap.py | rajatenzyme/Coding-Journey- | 65a0570153b7e3393d78352e78fb2111223049f3 | [
"MIT"
] | null | null | null | class BinaryHeap():
def __init__(self):
# To enable 1-index based array operations
self.heapData = [0]
self.currentSize = 0
# If the min value is down in the tree, it needs to be bubbled up
# to the proper position within the tree
def bubbleUp(self, i):
# check that i is not at the root index
while i // 2 > 0:
if self.heapData[i] < self.heapData[i // 2]:
# swap the values so the lower value will get bubbled up
self.heapData[i // 2], self.heapData[i] = \
self.heapData[i], self.heapData[i // 2]
i = i // 2
# Append the value to the list, then bubble up values based on
# minimum heap property
def insert(self, k):
self.heapData.append(k)
self.currentSize += 1
self.bubbleUp(self.currentSize)
# Delete the minimum value (present at the root index)
def deleteMin(self):
if self.currentSize == 0:
return 'Binary Heap is empty'
# Minimum value is present at index[1] in the heapData list
min_val = self.heapData[1]
# Take the last element and move it to first position in the list
self.heapData[1] = self.heapData[self.currentSize]
self.currentSize -= 1
# Remove the last element in the list
self.heapData.pop()
# bubble down values as required based on minimum heap property from
# the root index
self.bubbleDown(1)
return min_val
# Check the smallest child - left child or right child, return its index
def minChildIdx(self, i):
if i * 2 + 1 > self.currentSize:
return i * 2
else:
# Check if left-child value is less than right-child value
if self.heapData[i * 2] < self.heapData[i * 2 + 1]:
return i * 2
else:
return i * 2 + 1
# If a larger value is up in the tree, it needs to be bubbled down
# to the proper position within the tree
def bubbleDown(self, i):
while (i * 2) <= self.currentSize:
# From the left or right child - get the index of the minimum value
min_child_idx = self.minChildIdx(i)
# if parent value is greater than child value - swap them
if self.heapData[i] > self.heapData[min_child_idx]:
self.heapData[i], self.heapData[min_child_idx] = \
self.heapData[min_child_idx], self.heapData[i]
# Repeat from index of the minimum child value
i = min_child_idx
# Build minimum heap from given numbers - can be represented in an array
def minHeapify(self, newlist):
i = len(newlist) // 2
self.currentSize = len(newlist)
self.heapData = [0] + newlist[:]
# Bubble down as required so that minimum heap property is maintained
while i > 0:
self.bubbleDown(i)
i -= 1
def printData(self):
return self.heapData[1::]
if __name__ == '__main__':
    # Demo 1: build a heap from a static list, then drain it in priority order.
    # Empty binary heap
    binary_heap = BinaryHeap()
    # Build minimum heap - using static input
    binary_heap.minHeapify([2, 17, 12, 35, 59])
    # Insert values into the heap
    binary_heap.insert(23)
    binary_heap.insert(89)
    binary_heap.insert(46)
    binary_heap.insert(55)
    # Expected - [2, 17, 12, 35, 59, 23, 89, 46, 55]
    print('Binary Heap Array representation - Static input')
    print(binary_heap.printData())
    print('Priority Queue - mininum value')
    # Expected - 2
    print(binary_heap.deleteMin())
    # Expected - 12
    print(binary_heap.deleteMin())
    # Expected - 17
    print(binary_heap.deleteMin())
    # Expected - 23
    print(binary_heap.deleteMin())
    # Expected - 35
    print(binary_heap.deleteMin())
    # Expected - 46
    print(binary_heap.deleteMin())
    # Expected - 55
    print(binary_heap.deleteMin())
    # Expected - 59
    print(binary_heap.deleteMin())
    # Expected - 89
    print(binary_heap.deleteMin())
    # Expected - Binary Heap is empty
    print(binary_heap.deleteMin())
    # Demo 2: build a heap from values typed in interactively.
    # Using Dynamic input
    len_binary_heap = int(input('Enter total number of elements in Binary Heap - '))
    print(len_binary_heap)
    binary_heap_dyn = BinaryHeap()
    for _ in range(len_binary_heap):
        binary_heap_dyn.insert(int(input('Enter binary heap value - ')))
    print('Binary Heap Array representation - Dynamic input')
    print(binary_heap_dyn.printData())
    print('Priority Queue - mininum value')
    # One extra deleteMin on purpose, to demonstrate the empty-heap message.
    for _ in range(len_binary_heap + 1):
        print(binary_heap_dyn.deleteMin())
'''
Sample Input : [2,5,1,3,6]
Sample Output : 1, 3, 2, 5, 6
Deletion Order - 1, 2, 3, 5, 6
'''
| 27.765714 | 84 | 0.596007 |
8de04caf07e90ac0f9c974eb8d08d7fd4cef3aca | 19,022 | py | Python | barak/fitcont.py | ntejos/Barak | 1333dc1aa9ce9f5f339e3a0197523b4bdfa2dbbb | [
"BSD-3-Clause"
] | null | null | null | barak/fitcont.py | ntejos/Barak | 1333dc1aa9ce9f5f339e3a0197523b4bdfa2dbbb | [
"BSD-3-Clause"
] | null | null | null | barak/fitcont.py | ntejos/Barak | 1333dc1aa9ce9f5f339e3a0197523b4bdfa2dbbb | [
"BSD-3-Clause"
] | null | null | null | """ Functions and Classes used to fit an estimate of an unabsorbed
continuum to a QSO spectrum.
"""
# p2.6+ compatibility
from __future__ import division, print_function, unicode_literals
try:
unicode
except NameError:
unicode = basestring = str
import numpy as np
import matplotlib.pyplot as pl
import matplotlib.transforms as mtran
from .stats import Gaussian
from .utilities import between, stats, indexnear
from .convolve import convolve_psf
from .io import loadobj, saveobj
from .interp import AkimaSpline
from .sed import qso_template
import os
def spline_continuum(wa, fl, er, edges, minfrac=0.01, nsig=3.0,
                     resid_std=1.3, debug=False):
    """ Fit a continuum to a chunk of a spectrum.

    Very loosely based on the method in Aguirre et al. 2002.

    Parameters
    ----------
    wa               : Wavelengths.
    fl               : Fluxes.
    er               : One sigma errors.
    edges            : Wavelengths giving the chunk edges.
    minfrac = 0.01   : At least this fraction of pixels in a single chunk
                       contributes to the fit.
    nsig = 3.0       : No. of sigma for rejection for clipping.
    resid_std = 1.3  : Maximum residual st. dev. in a given chunk.
    debug = False    : If True, make helpful plots.

    Returns
    -------
    Continuum array and spline points
    """
    # Overview:
    # (1) Calculate the median flux value for each wavelength chunk.
    # (2) fit a 1st order spline (i.e. series of straight line
    # segments) through the set of points given by the central
    # wavelength for each chunk and the median flux value for each
    # chunk.
    # (3) Remove any flux values that fall more than nsig*er below
    # the spline.
    # Repeat 1-3 until the continuum converges on a solution (if it
    # doesn't throw hands up in despair! Essential to choose a
    # suitable first guess with small enough chunks).
    if len(edges) < 2:
        raise ValueError('must be at least two bin edges!')
    wa,fl,er = (np.asarray(a, np.float64) for a in (wa,fl,er))
    if debug:
        ax = pl.gca()
        ax.cla()
        ax.plot(wa,fl)
        ax.plot(wa,er)
        ax.axhline(0, color='0.7')
        good = ~np.isnan(fl) & ~np.isnan(er) & ~np.isinf(fl)
        # NOTE(review): np.percentile takes q on a 0-100 scale, so 0.90 is the
        # 0.9th percentile, not the 90th; this only affects the debug y-limit.
        ymax = 2*np.percentile(fl[good], 0.90)
        ax.set_ylim(-0.1*ymax, ymax)
        ax.set_xlim(min(edges), max(edges))
        ax.set_autoscale_on(0)
        pl.draw()
    npts = len(wa)
    mask = np.ones(npts, bool)
    oldco = np.zeros(npts, float)
    co = np.zeros(npts, float)
    # find indices of chunk edges and central wavelengths of chunks
    indices = wa.searchsorted(edges)
    indices = [(i0,i1) for i0,i1 in zip(indices[:-1],indices[1:])]
    if debug: print(' indices', indices)
    wavc = [0.5*(w1 + w2) for w1,w2 in zip(edges[:-1],edges[1:])]
    # information per chunks
    npts = len(indices)
    mfl = np.zeros(npts, float) # median fluxes at chunk centres
    goodfit = np.zeros(npts, bool) # is fit acceptable?
    res_std = np.zeros(npts, float) # residuals standard dev
    res_med = np.zeros(npts, float) # residuals median
    if debug:
        print('chunk centres', wavc)
        cont, = ax.plot(wa,co,'k')
        midpoints, = ax.plot(wavc, mfl,'rx',mew=1.5,ms=8)
    # loop that iteratively fits the continuum
    while True:
        # (1) median flux per not-yet-converged chunk, from unmasked pixels
        for i,(j1,j2) in enumerate(indices):
            if goodfit[i]: continue
            # calculate median flux
            w,f,e,m = (item[j1:j2] for item in (wa,fl,er,mask))
            ercond = (e > 0) & (~np.isnan(f))
            cond = m & ercond
            chfl = f[cond]
            chflgood = f[ercond]
            if len(chflgood) == 0: continue
            # too few unmasked pixels left: fall back to a flux cutoff so the
            # chunk still contributes to the fit.
            # NOTE(review): np.percentile's q is 0-100, so passing minfrac
            # (0.01) selects the 0.01th percentile - possibly minfrac*100 was
            # intended; behaviour kept as-is.
            if float(len(chfl)) / len(chflgood) < minfrac:
                f_cutoff = np.percentile(chflgood, minfrac)
                cond = ercond & (f >= f_cutoff)
            if len(f[cond]) == 0: continue
            mfl[i] = np.median(f[cond])
        # (2) calculate the spline. add extra points on either end to give
        # a nice slope at the end points.
        extwavc = ([wavc[0] - (wavc[1] - wavc[0])] + list(wavc) +
                   [wavc[-1] + (wavc[-1] - wavc[-2])])
        extmfl = ([mfl[0] - (mfl[1] - mfl[0])] + list(mfl) +
                  [mfl[-1] + (mfl[-1] - mfl[-2])])
        co = np.interp(wa, extwavc, extmfl)
        if debug:
            cont.set_ydata(co)
            midpoints.set_xdata(wavc)
            midpoints.set_ydata(mfl)
            pl.draw()
        # calculate residuals for each chunk
        for i,(j1,j2) in enumerate(indices):
            if goodfit[i]: continue
            ercond = er[j1:j2] > 0
            cond = ercond & mask[j1:j2]
            chfl = fl[j1:j2][cond]
            chflgood = fl[j1:j2][ercond]
            if len(chflgood) == 0: continue
            if float(len(chfl)) / len(chflgood) < minfrac:
                f_cutoff = np.percentile(chflgood, minfrac)
                cond = ercond & (fl[j1:j2] > f_cutoff)
            residuals = (fl[j1:j2][cond] - co[j1:j2][cond]
                         ) / er[j1:j2][cond]
            # bug fix: check for an empty chunk *before* taking .std(), which
            # previously produced a NaN (and a numpy warning) on empty input.
            if len(residuals) == 0:
                continue
            res_std[i] = residuals.std()
            res_med[i] = np.median(residuals)
            # If residuals have std < 1.0 and mean ~1.0, we might have
            # a reasonable fit.
            if res_std[i] <= resid_std:
                goodfit[i] = True
        if debug:
            print('median and st. dev. of residuals by region - aiming for 0,1')
            for i,(f0,f1) in enumerate(zip(res_med, res_std)):
                # bug fix: the original format used {0} for all three fields,
                # printing the region index three times instead of the stats.
                print('{0} {1:.2f} {2:.2f}'.format(i,f0,f1))
            try:
                pause = raw_input
            except NameError:  # Python 3: raw_input was renamed to input
                pause = input
            pause('Enter...')
        # (3) Remove flux values that fall more than N*sigma below the
        # spline fit.
        cond = (co - fl) > nsig * er
        if debug:
            print(np.nanmax(np.abs(co - oldco)/co))
        # Finish when the biggest change between the new and old
        # medians is smaller than the number below.
        if np.nanmax(np.abs(co - oldco)/co) < 4e-3:
            break
        oldco = co.copy()
        mask[cond] = False
    # finally fit a cubic spline through the median values to
    # get a smooth continuum.
    final = AkimaSpline(wavc, mfl)
    return final(wa), list(zip(wavc,mfl))
def fitqsocont(wa, fl, er, redshift, oldco=None, knots=None,
               nbin=1, divmult=1, forest_divmult=1, atmos=True, debug=False):
    """ Find an estimate of a QSO continuum, then refine it interactively.

    divmult=3 works well for R~40000, S/N~10, z=3 QSO spectrum.

    nbin bins the data for plotting and continuum fitting (obsolete)

    wa, fl, er are the wavelength, flux and one-sigma error arrays;
    redshift is used to place the wavelength chunk boundaries in the QSO
    rest frame.  If `knots` is given, the automatic spline fit is skipped
    and those points seed the interactive fit instead.  Returns the
    (continuum array, continuum points) accepted in the interactive session.
    """
    # choose initial reference continuum points. Increase divmult for
    # fewer initial continuum points (generally needed for poorer S/N
    # spectra).

    zp1 = 1 + redshift
    #reflines = np.array([1025.72, 1215.6701, 1240.14, 1398.0,
    #                     1549.06, 1908, 2800 ])

    # generate the edges of wavelength chunks to send to fitting routine

    # these edges and divisions are generated by trial and error

    # for S/N = 15ish and resolution = 2000ish
    # Each row is (rest-frame left edge, right edge, number of chunks).
    div = np.rec.fromrecords([(500. , 800. , 25),
                              (800. , 1190., 25),
                              (1190., 1213., 4),
                              (1213., 1230., 6),
                              (1230., 1263., 6),
                              (1263., 1290., 5),
                              (1290., 1340., 5),
                              (1340., 1370., 2),
                              (1370., 1410., 5),
                              (1410., 1515., 5),
                              (1515., 1600., 15),
                              (1600., 1800., 8),
                              (1800., 1900., 5),
                              (1900., 1940., 5),
                              (1940., 2240., 15),
                              (2240., 3000., 25),
                              (3000., 6000., 80),
                              (6000., 20000., 100),
                              ], names=str('left,right,num'))
    # Scale the chunk counts: divmult redward of Lya, forest_divmult blueward.
    div.num[2:] = np.ceil(div.num[2:] * divmult)
    div.num[:2] = np.ceil(div.num[:2] * forest_divmult)
    # shift the rest-frame edges to the observed frame
    div.left *= zp1
    div.right *= zp1
    if debug: print(div.tolist())
    temp = [np.linspace(left, right, n+1)[:-1] for left,right,n in div]
    edges = np.concatenate(temp)
    if debug: stats(edges)

    # i0/i2 bracket the edges that overlap the observed wavelength range
    # (i1, at 1210*zp1, is computed only for the debug printout).
    i0,i1,i2 = edges.searchsorted([wa[0], 1210*zp1, wa[-1]])
    if debug: print(i0,i1,i2)

    contpoints = []
    if knots is not None:
        contpoints.extend(knots)
    else:
        # automatic first-pass fit supplies the starting knots
        co,cp = spline_continuum(wa, fl, er, edges[i0:i2], debug=debug)
        contpoints.extend(cp)
    fig = pl.figure(figsize=(11, 7))
    fig.subplots_adjust(left=0.05, right=0.95, bottom=0.1, top=0.95)
    wrapper = InteractiveCoFit(wa, fl, er, contpoints, co=oldco, nbin=nbin,
                               redshift=redshift, fig=fig, atmos=atmos)
    # block until the user presses 'k' (keep) or 'q' (quit) in the figure
    while True:
        if wrapper.finished: break
        pl.waitforbuttonpress()

    return wrapper.continuum, wrapper.contpoints
class InteractiveCoFit(object):
    """Interactive matplotlib session for adjusting a spectrum's continuum.

    Continuum knots (``contpoints``) can be added, deleted and moved with
    keyboard shortcuts; an Akima spline through the knots (between optional
    breaks) defines the continuum.  Read ``self.continuum`` and
    ``self.contpoints`` once ``self.finished`` is True.
    """
    help_message = """
'a' : add a new continuum point
'd' : delete the nearest point
'b' : add a break in the continuum
'r' : remove a break in the continuum
's' : smooth the spectrum
'k' : keep continuum
'q' : quit without keeping continuum
"""
    def __init__(self, wa, fl, er, contpoints, co=None,
                 nbin=8, redshift=None, atmos=None, fig=None):
        """ Initialise figure, plots and variables.

        Parameters
        ----------
        wa : Wavelengths
        fl : Fluxes
        er : One sigma errors
        nbin : int (8)
          Number of pixels to bin arrays in wavelength. Default 8.
        contpoints : list of x,y tuple pairs (None)
          The points through which a cubic spline is passed,
          defining the continuum.
        redshift : float (None)
          Redshift used to plot reference emission lines.
        atmos : list of wavelength pairs (None)
          Regions of atmospheric absorption to plot.

        Notes
        -----
        Updates the following attributes:

          self.spec : Dictionary of wa, fl, er.
          self.contpoints : Points used to define the continuum.
          self.nbin : The input nbin value.
          self.markers : Dictionary of matplotlib plotting artists.
          self.connections : Callback connections.
          self.fig : The plotting figure instance.
        """
        #setup
        # Rest-frame Lya forest limits; not referenced elsewhere in this class.
        self.WMIN_LYA = 1040
        self.WMAX_LYA = 1190

        self.spec = dict(wa=wa, fl=fl, er=er, co=co)
        self.nbin = nbin
        self.breaks = [wa[0], wa[-1]] # wavelengths of breaks in the continuum
        self.contpoints = list(contpoints)
        if os.path.lexists('./_knots.sav'):
            # bug fix: raw_input does not exist on Python 3; fall back to input
            try:
                prompt = raw_input
            except NameError:
                prompt = input
            c = prompt('temporary knots file exists, use these knots? (y) ')
            if c.lower() != 'n':
                self.contpoints = loadobj('./_knots.sav')

        self.markers = dict()
        self.art_fl = None
        if fig is None:
            self.fig = pl.figure()
        else:
            self.fig = fig
        # disable any existing key press callbacks
        # bug fix: use self.fig (always set) rather than the fig argument,
        # which is None when no figure was passed in.
        cids = list(self.fig.canvas.callbacks.callbacks['key_press_event'])
        for cid in cids:
            self.fig.canvas.callbacks.disconnect(cid)

        self.template = None
        if redshift is not None:
            self.template = qso_template(wa, redshift)

        self.connections = []
        self.continuum = None
        self.finished = False
        self.redshift = redshift
        self.atmos = atmos
        self.smoothby = None
        self.plotinit()
        self.update()
        self.modifypoints()
        pl.draw()

    def plotinit(self):
        """ Set up the figure and do initial plots.

        Updates the following attributes:

          self.markers
        """
        wa,fl,er = [self.spec[k][0:-1:self.nbin] for k in 'wa fl er'.split()]
        if self.spec['co'] is not None:
            co = self.spec['co'][0:-1:self.nbin]
        # axis for spectrum & continuum
        a0 = self.fig.add_axes((0.05,0.1,0.9,0.6))
        a0.set_autoscale_on(0)
        # axis for residuals
        a1 = self.fig.add_axes((0.05,0.75,0.9,0.2),sharex=a0)
        a1.set_autoscale_on(0)
        # reference lines at 0/+-1/+-2 sigma for the residual panel
        a1.axhline(0,color='k',alpha=0.7, zorder=99)
        a1.axhline(1,color='k',alpha=0.7, zorder=99)
        a1.axhline(-1,color='k',alpha=0.7, zorder=99)
        a1.axhline(2,color='k',linestyle='dashed',zorder=99)
        a1.axhline(-2,color='k',linestyle='dashed',zorder=99)
        m0, = a1.plot([0],[0],'.r',marker='.', mec='none', lw=0, mew=0, ms=6, alpha=0.5)
        a1.set_ylim(-4, 4)
        a0.axhline(0, color='0.7')
        if self.spec['co'] is not None:
            # previous continuum estimate, for reference
            a0.plot(wa,co, color='0.7', lw=1, ls='dashed')
        self.art_fl, = a0.plot(wa, fl, 'b', lw=0.5, linestyle='steps-mid')
        a0.plot(wa, er, lw=0.5, color='orange')
        # placeholders for the fitted continuum and the draggable knots
        m1, = a0.plot([0], [0], 'r', alpha=0.7)
        m2, = a0.plot([0], [0], 'o', mfc='None',mew=1, ms=8, mec='r', picker=5,
                      alpha=0.7)
        a0.set_xlim(min(wa), max(wa))
        good = (er > 0) & ~np.isnan(fl) & ~np.isinf(fl)
        ymax = 2 * np.abs(np.percentile(fl[good], 95))
        a0.set_ylim(-0.1*ymax, ymax)
        a0.text(0.9,0.9, 'z=%.2f' % self.redshift, transform=a0.transAxes)

        # sideways histogram of residuals plus a unit Gaussian for comparison
        trans = mtran.blended_transform_factory(a1.transAxes, a1.transData)
        hist, = a1.plot([], [], color='k', transform=trans)
        x = np.linspace(-3,3)
        a1.plot(Gaussian(x,0,1,0.05), x, color='k', transform=trans, lw=0.5)

        if self.template is not None:
            trans = mtran.blended_transform_factory(a0.transData, a0.transAxes)
            a0.plot(self.spec['wa'], self.template/self.template.max(), '-c', lw=2,
                    alpha=0.5, transform=trans)

        self.fig.canvas.draw()
        self.markers.update(contpoints=m2, cont=m1, resid=m0, hist_left=hist)

    def update(self):
        """ Calculates the new continuum, residuals and updates the plots.

        Updates the following attributes:

          self.markers
          self.continuum
        """
        wa,fl,er = (self.spec[key] for key in 'wa fl er'.split())
        co = np.empty(len(wa))
        co.fill(np.nan)
        # fit one Akima spline per continuum segment between breaks
        for b0,b1 in zip(self.breaks[:-1], self.breaks[1:]):
            cpts = [(x,y) for x,y in self.contpoints if b0 <= x <= b1]
            if len(cpts) < 3:
                continue
            spline = AkimaSpline(*list(zip(*cpts)))
            i,j = wa.searchsorted([b0,b1])
            co[i:j] = spline(wa[i:j])

        resid = (fl - co) / er
        # histogram of residuals over the currently-visible x range
        bins = np.arange(0, 5+0.1, 0.2)
        w0,w1 = self.fig.axes[1].get_xlim()
        x,_ = np.histogram(resid[between(wa, w0, w1)],
                           bins=bins)
        b = np.repeat(bins, 2)
        X = np.concatenate([[0], np.repeat(x,2), [0]])
        Xmax = X.max()
        X = 0.05 * X / Xmax
        self.markers['hist_left'].set_data(X, b)

        self.markers['contpoints'].set_data(list(zip(*self.contpoints)))
        nbin = self.nbin
        self.markers['cont'].set_data(wa[::nbin], co[::nbin])
        self.markers['resid'].set_data(wa[::nbin], resid[::nbin])
        if self.smoothby is not None:
            sfl = convolve_psf(fl, self.smoothby)
            self.art_fl.set_data(wa, sfl)
        else:
            self.art_fl.set_data(wa, fl)
        self.continuum = co
        # persist the knots so a crashed session can be resumed
        saveobj('_knots.sav', self.contpoints, overwrite=True)
        self.fig.canvas.draw()

    def on_keypress(self, event):
        """ Interactive fiddling via the keyboard

        Updates:

          self.contpoints
        """
        if event.key == 'q':
            # quit, discarding the fit
            for item in self.connections:
                self.fig.canvas.mpl_disconnect(item)
            self.contpoints = None
            self.continuum = None
            self.finished = True
            return
        if event.key == 'k':
            # keep the current continuum
            for item in self.connections:
                self.fig.canvas.mpl_disconnect(item)
            self.finished = True
            return
        if event.inaxes != self.fig.axes[0]: return

        if event.key == 'a':
            # add a point to contpoints
            x,y = event.xdata,event.ydata
            # bug fix: zip() returns an iterator on Python 3, so the old
            # `zip(*self.contpoints)[0]` raised a TypeError (and IndexError
            # when no points existed).
            if x not in [p[0] for p in self.contpoints]:
                self.contpoints.append((x,y))
                self.update()
        elif event.key == 'd':
            # remove the point nearest the cursor
            contx,conty = zip(*self.contpoints)
            sep = np.hypot(event.xdata - contx, event.ydata - conty)
            self.contpoints.remove(self.contpoints[sep.argmin()])
            self.update()
        elif event.key == 'm':
            # Move the nearest point to the cursor position
            contx,conty = zip(*self.contpoints)
            sep = np.hypot(event.xdata - contx, event.ydata - conty)
            self.contpoints[sep.argmin()] = (event.xdata,event.ydata)
            self.update()
        elif event.key == 'b':
            # Add a break to the continuum.
            self.breaks.append(event.xdata)
            self.breaks.sort()
            self.update()
        elif event.key == 'r':
            # remove the break nearest the cursor (end breaks are fixed)
            i = indexnear(self.breaks, event.xdata)
            if i not in (0, len(self.breaks)-1):
                self.breaks.remove(self.breaks[i])
                self.update()
        elif event.key == 'S':
            # Save fit to a temporary file
            print( 'fitcont: Writing output to temporary file tmp.sav')
            saveobj('tmp.sav', (self.continuum, self.contpoints), overwrite=1)
        elif event.key == 's':
            # bug fix: raw_input does not exist on Python 3
            try:
                prompt = raw_input
            except NameError:
                prompt = input
            c = prompt('New FWHM in pixels of Gaussian to convolve with? '
                       '(blank for no smoothing) ')
            if c == '':
                # restore spectrum
                self.smoothby = None
                self.update()
            else:
                try:
                    fwhm = float(c)
                except ValueError:
                    # bug fix: float() raises ValueError (not TypeError) on
                    # bad input, and the old code then read an undefined
                    # `fwhm`, crashing with a NameError.
                    print('FWHM must be a floating point number >= 1')
                    return
                if fwhm < 1:
                    self.smoothby = None
                else:
                    self.smoothby = fwhm
                self.update()
        elif event.key == '?':
            print(self.help_message)

    def on_button_release(self, event):
        self.update()

    def modifypoints(self):
        """ Add/remove continuum points."""
        print(self.help_message)
        id1 = self.fig.canvas.mpl_connect('key_press_event',self.on_keypress)
        id2 = self.fig.canvas.mpl_connect('button_release_event',self.on_button_release)
        self.connections.extend([id1, id2])
| 36.793037 | 95 | 0.535958 |
461449409c309f261d09444bfc794646f15726ad | 12,774 | py | Python | src/python/pants/base/deprecated.py | anthonyjpratti/pants | d98e53af6ddd877861231bce8343f8204da0a9d1 | [
"Apache-2.0"
] | null | null | null | src/python/pants/base/deprecated.py | anthonyjpratti/pants | d98e53af6ddd877861231bce8343f8204da0a9d1 | [
"Apache-2.0"
] | null | null | null | src/python/pants/base/deprecated.py | anthonyjpratti/pants | d98e53af6ddd877861231bce8343f8204da0a9d1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import inspect
import sys
import warnings
from contextlib import contextmanager
from functools import wraps
from typing import Callable, Iterator, Optional
from packaging.version import InvalidVersion, Version
from pants.util.memo import memoized_method
from pants.version import PANTS_SEMVER
class DeprecationApplicationError(Exception):
    """Base exception for any misuse of the deprecation machinery itself.

    Subclasses signal problems with how a deprecation was declared (e.g. a
    missing or malformed removal version), not with the deprecated code.
    """
class MissingSemanticVersionError(DeprecationApplicationError):
    """Indicates the required removal_version was not supplied (i.e. it was None)."""
class BadSemanticVersionError(DeprecationApplicationError):
    """Indicates the supplied removal_version was not a valid x.y.z semver string."""
class NonDevSemanticVersionError(DeprecationApplicationError):
    """Indicates the supplied removal_version was not a pre-release (`dev`) version."""
class InvalidSemanticVersionOrderingError(DeprecationApplicationError):
    """Indicates that multiple semantic version strings were provided in an inconsistent ordering.

    E.g. a deprecation start version that is not strictly before its removal version.
    """
class CodeRemovedError(Exception):
    """Indicates that the removal_version is not in the future.

    I.e., that the option/function/module with that removal_version has already been removed.
    Note, the code in question may not actually have been excised from the codebase yet, but
    it may be at any time, and no control paths access it.
    """
class BadDecoratorNestingError(DeprecationApplicationError):
    """Indicates the @deprecated decorator was innermost in a sequence of layered decorators."""
def get_deprecated_tense(
  removal_version: str, future_tense: str = 'will be', past_tense: str = 'was'
) -> str:
  """Pick the grammatical tense for a removal version relative to the running pants version.

  Returns ``future_tense`` while the removal version is still ahead of (or equal to)
  the current version, and ``past_tense`` once it is behind it.
  """
  if Version(removal_version) >= PANTS_SEMVER:
    return future_tense
  return past_tense
@memoized_method
def validate_deprecation_semver(version_string: str, version_description: str) -> Version:
  """Validate that version_string is a valid semver.

  If so, returns that semver.  Raises an error otherwise.

  :param version_string: A pantsbuild.pants version which affects some deprecated entity.
  :param version_description: A string used in exception messages to describe what the
                              `version_string` represents.
  :raises DeprecationApplicationError: if the version_string parameter is invalid.
  """
  if version_string is None:
    raise MissingSemanticVersionError('The {} must be provided.'.format(version_description))
  if not isinstance(version_string, str):
    raise BadSemanticVersionError('The {} must be a version string.'.format(version_description))

  try:
    parsed = Version(version_string)
  except InvalidVersion as e:
    raise BadSemanticVersionError('The given {} {} is not a valid version: '
                                  '{}'.format(version_description, version_string, e))

  # NB: packaging will parse versions like 1.a.0 as "1a0" and consider them
  # "valid"; we explicitly require the x.y.z form.
  if len(parsed.base_version.split('.')) != 3:
    raise BadSemanticVersionError('The given {} is not a valid version: '
                                  '{}'.format(version_description, version_string))
  if not parsed.is_prerelease:
    raise NonDevSemanticVersionError('The given {} is not a dev version: {}\n'
                                     'Features should generally be removed in the first `dev` release '
                                     'of a release cycle.'.format(version_description, version_string))
  return parsed
def _get_frame_info(stacklevel: int, context: int = 1) -> inspect.FrameInfo:
"""Get a Traceback for the given `stacklevel`.
For example:
`stacklevel=0` means this function's frame (_get_frame_info()).
`stacklevel=1` means the calling function's frame.
See https://docs.python.org/2/library/inspect.html#inspect.getouterframes for more info.
NB: If `stacklevel` is greater than the number of actual frames, the outermost frame is used
instead.
"""
frame_list = inspect.getouterframes(inspect.currentframe(), context=context)
frame_stack_index = stacklevel if stacklevel < len(frame_list) else len(frame_list) - 1
return frame_list[frame_stack_index]
@contextmanager
def _greater_warnings_context(context_lines_string: str) -> Iterator[None]:
"""Provide the `line` argument to warnings.showwarning().
warnings.warn_explicit() doesn't use the `line` argument to showwarning(), but we want to
make use of the warning filtering provided by warn_explicit(). This contextmanager overwrites the
showwarning() method to pipe in the desired amount of context lines when using warn_explicit().
"""
prev_showwarning = warnings.showwarning
def wrapped(message, category, filename, lineno, file=None, line=None):
return prev_showwarning(
message=message,
category=category,
filename=filename,
lineno=lineno,
file=file,
line=(line or context_lines_string))
warnings.showwarning = wrapped
yield
warnings.showwarning = prev_showwarning
# TODO: propagate `deprecation_start_version` to other methods in this file!
def warn_or_error(
    removal_version: str,
    deprecated_entity_description: str,
    hint: Optional[str] = None,
    deprecation_start_version: Optional[str] = None,
    stacklevel: int = 3,
    frame_info: Optional[inspect.FrameInfo] = None,
    context: int = 1,
    ensure_stderr: bool = False,
    print_warning: bool = True,
) -> None:
    """Check the removal_version against the current pants version.

    Issues a warning if the removal version is > current pants version, or an error otherwise.

    :param removal_version: The pantsbuild.pants version at which the deprecated entity will
                            be/was removed.
    :param deprecated_entity_description: A short description of the deprecated entity, that
                                          we can embed in warning/error messages.
    :param hint: A message describing how to migrate from the removed entity.
    :param deprecation_start_version: The pantsbuild.pants version at which the entity will
                                      begin to display a deprecation warning. This must be less
                                      than the `removal_version`. If not provided, the
                                      deprecation warning is always displayed.
    :param stacklevel: The stacklevel to pass to warnings.warn.
    :param frame_info: If provided, use this frame info instead of getting one from `stacklevel`.
    :param context: The number of lines of source code surrounding the selected frame to display
                    in a warning message.
    :param ensure_stderr: Whether use warnings.warn, or use warnings.showwarning to print
                          directly to stderr.
    :param print_warning: Whether to print a warning for deprecations *before* their removal.
                          If this flag is off, an exception will still be raised for options
                          past their deprecation date.
    :raises DeprecationApplicationError: if the removal_version parameter is invalid.
    :raises CodeRemovedError: if the current version is later than the version marked for removal.
    """
    removal_semver = validate_deprecation_semver(removal_version, 'removal version')
    if deprecation_start_version:
        deprecation_start_semver = validate_deprecation_semver(
            deprecation_start_version, 'deprecation start version')
        if deprecation_start_semver >= removal_semver:
            raise InvalidSemanticVersionOrderingError(
                'The deprecation start version {} must be less than the end version {}.'
                .format(deprecation_start_version, removal_version))
        elif PANTS_SEMVER < deprecation_start_semver:
            # Deprecation window hasn't started yet: stay silent.
            return
    msg = 'DEPRECATED: {} {} removed in version {}.'.format(deprecated_entity_description,
        get_deprecated_tense(removal_version), removal_version)
    if hint:
        msg += '\n {}'.format(hint)
    # We need to have filename and line_number for warnings.formatwarning, which appears to be the only
    # way to get a warning message to display to stderr. We get that from frame_info -- it's too bad
    # we have to reconstruct the `stacklevel` logic ourselves, but we do also gain the ability to have
    # multiple lines of context, which is neat.
    if frame_info is None:
        frame_info = _get_frame_info(stacklevel, context=context)
    _, filename, line_number, _, code_context, _ = frame_info
    if code_context:
        context_lines = ''.join(code_context)
    else:
        context_lines = '<no code context available>'
    if removal_semver > PANTS_SEMVER:
        # Not yet removed: warn (unless warnings are suppressed via the flags below).
        if ensure_stderr:
            # No warning filters can stop us from printing this message directly to stderr.
            warning_msg = warnings.formatwarning(
                msg, DeprecationWarning, filename, line_number, line=context_lines)
            print(warning_msg, file=sys.stderr)
        elif print_warning:
            # This output is filtered by warning filters.
            with _greater_warnings_context(context_lines):
                warnings.warn_explicit(
                    message=msg,
                    category=DeprecationWarning,
                    filename=filename,
                    lineno=line_number)
        return
    else:
        # Past the removal version: this is a hard error.
        raise CodeRemovedError(msg)
def deprecated_conditional(
    predicate: Callable[[], bool],
    removal_version: str,
    entity_description: str,
    hint_message: Optional[str] = None,
    stacklevel: int = 4
) -> None:
    """Marks a certain configuration as deprecated.

    The predicate is used to determine if that configuration is deprecated. It is a function that
    will be called, if true, then the deprecation warning will issue.

    :param predicate: A function that returns True if the deprecation warning should be on.
    :param removal_version: The pants version which will remove the deprecated functionality.
    :param entity_description: A description of the deprecated entity.
    :param hint_message: An optional hint pointing to alternatives to the deprecation.
    :param stacklevel: How far up in the stack do we go to find the calling fn to report
    :raises DeprecationApplicationError if the deprecation is applied improperly.
    """
    # Always validate the version, even when the predicate is off.
    validate_deprecation_semver(removal_version, 'removal version')
    if not predicate():
        return
    warn_or_error(removal_version, entity_description, hint_message, stacklevel=stacklevel)
def deprecated(
    removal_version: str,
    hint_message: Optional[str] = None,
    subject: Optional[str] = None,
    ensure_stderr: bool = False
):
    """Marks a function or method as deprecated.

    A removal version must be supplied and it must be greater than the current 'pantsbuild.pants'
    version.

    When choosing a removal version there is a natural tension between the code-base, which benefits
    from short deprecation cycles, and the user-base which may prefer to deal with deprecations less
    frequently. As a rule of thumb, if the hint message can fully convey corrective action
    succinctly and you judge the impact to be on the small side (effects custom tasks as opposed to
    effecting BUILD files), lean towards the next release version as the removal version; otherwise,
    consider initiating a discussion to win consensus on a reasonable removal version.

    :param removal_version: The pantsbuild.pants version which will remove the deprecated
                            function.
    :param hint_message: An optional hint pointing to alternatives to the deprecation.
    :param subject: The name of the subject that has been deprecated for logging clarity. Defaults
                    to the name of the decorated function/method.
    :param ensure_stderr: Forwarded to `ensure_stderr` in warn_or_error().
    :raises DeprecationApplicationError if the @deprecation is applied improperly.
    """
    validate_deprecation_semver(removal_version, 'removal version')

    def decorator(func):
        # Reject application over non-function objects (e.g. over another decorator's
        # wrapper class), which would make the deprecation message misleading.
        if not inspect.isfunction(func):
            raise BadDecoratorNestingError('The @deprecated decorator must be applied innermost of all '
                                           'decorators.')
        qualified_name = '{}.{}'.format(func.__module__, func.__name__)

        @wraps(func)
        def wrapper(*args, **kwargs):
            # Warn (or error, once past the removal version) on every call.
            warn_or_error(removal_version, subject or qualified_name, hint_message,
                          ensure_stderr=ensure_stderr)
            return func(*args, **kwargs)
        return wrapper
    return decorator
def deprecated_module(removal_version: str, hint_message: Optional[str] = None) -> None:
    """Marks an entire module as deprecated.

    Add a call to this at the top of the deprecated module, and it will print a warning message
    when the module is imported.

    Arguments are as for deprecated(), above.
    """
    warn_or_error(removal_version=removal_version,
                  deprecated_entity_description='module',
                  hint=hint_message)
| 44.048276 | 104 | 0.732347 |
75a908d4fc83d64a620e82c23d539bd90fc65bba | 5,281 | py | Python | acme/acme/crypto_util_test.py | cybersimon/certbot | 174e59486338b8f3e17300ef9937a3182e733ac7 | [
"Apache-2.0"
] | 2 | 2016-07-14T14:26:42.000Z | 2016-10-04T04:52:21.000Z | acme/acme/crypto_util_test.py | cpu/certbot | 9fead41aaf93dde0d36d4aef6fded8dd306c1ddc | [
"Apache-2.0"
] | null | null | null | acme/acme/crypto_util_test.py | cpu/certbot | 9fead41aaf93dde0d36d4aef6fded8dd306c1ddc | [
"Apache-2.0"
] | null | null | null | """Tests for acme.crypto_util."""
import itertools
import socket
import threading
import time
import unittest
import six
from six.moves import socketserver # pylint: disable=import-error
import OpenSSL
from acme import errors
from acme import jose
from acme import test_util
class SSLSocketAndProbeSNITest(unittest.TestCase):
    """Tests for acme.crypto_util.SSLSocket/probe_sni."""
    def setUp(self):
        # Serve a self-signed cert under SNI name b'foo' on an OS-assigned port.
        self.cert = test_util.load_comparable_cert('cert.pem')
        key = test_util.load_pyopenssl_private_key('rsa512_key.pem')
        # pylint: disable=protected-access
        certs = {b'foo': (key, self.cert.wrapped)}
        from acme.crypto_util import SSLSocket
        class _TestServer(socketserver.TCPServer):
            # pylint: disable=too-few-public-methods
            # six.moves.* | pylint: disable=attribute-defined-outside-init,no-init
            def server_bind(self): # pylint: disable=missing-docstring
                # Wrap the listening socket so the TLS handshake selects a
                # certificate based on the client's SNI name.
                self.socket = SSLSocket(socket.socket(), certs=certs)
                socketserver.TCPServer.server_bind(self)
        self.server = _TestServer(('', 0), socketserver.BaseRequestHandler)
        self.port = self.server.socket.getsockname()[1]
        self.server_thread = threading.Thread(
            # pylint: disable=no-member
            target=self.server.handle_request)
        self.server_thread.start()
        time.sleep(1) # TODO: avoid race conditions in other way
    def tearDown(self):
        # Ensure the single-request server thread has finished before the next test.
        self.server_thread.join()
    def _probe(self, name):
        # Probe the local test server for `name` and wrap the returned cert
        # for equality comparison against the fixture.
        from acme.crypto_util import probe_sni
        return jose.ComparableX509(probe_sni(
            name, host='127.0.0.1', port=self.port))
    def test_probe_ok(self):
        self.assertEqual(self.cert, self._probe(b'foo'))
    def test_probe_not_recognized_name(self):
        self.assertRaises(errors.Error, self._probe, b'bar')
    # TODO: py33/py34 tox hangs forever on do_hendshake in second probe
    #def probe_connection_error(self):
    #    self._probe(b'foo')
    #    #time.sleep(1)  # TODO: avoid race conditions in other way
    #    self.assertRaises(errors.Error, self._probe, b'bar')
class PyOpenSSLCertOrReqSANTest(unittest.TestCase):
    """Test for acme.crypto_util._pyopenssl_cert_or_req_san."""
    @classmethod
    def _call(cls, loader, name):
        # Load fixture `name` via `loader` and extract its subjectAltName list.
        # pylint: disable=protected-access
        from acme.crypto_util import _pyopenssl_cert_or_req_san
        return _pyopenssl_cert_or_req_san(loader(name))
    @classmethod
    def _get_idn_names(cls):
        """Returns expected names from '{cert,csr}-idnsans.pem'."""
        # Non-ASCII codepoint ranges chopped into 45-character labels under
        # the reserved '.invalid' TLD, matching how the fixtures were built.
        chars = [six.unichr(i) for i in itertools.chain(range(0x3c3, 0x400),
                                                        range(0x641, 0x6fc),
                                                        range(0x1820, 0x1877))]
        return [''.join(chars[i: i + 45]) + '.invalid'
                for i in range(0, len(chars), 45)]
    def _call_cert(self, name):
        # Convenience wrapper: SANs of a certificate fixture.
        return self._call(test_util.load_cert, name)
    def _call_csr(self, name):
        # Convenience wrapper: SANs of a CSR fixture.
        return self._call(test_util.load_csr, name)
    def test_cert_no_sans(self):
        self.assertEqual(self._call_cert('cert.pem'), [])
    def test_cert_two_sans(self):
        self.assertEqual(self._call_cert('cert-san.pem'),
                         ['example.com', 'www.example.com'])
    def test_cert_hundred_sans(self):
        self.assertEqual(self._call_cert('cert-100sans.pem'),
                         ['example{0}.com'.format(i) for i in range(1, 101)])
    def test_cert_idn_sans(self):
        self.assertEqual(self._call_cert('cert-idnsans.pem'),
                         self._get_idn_names())
    def test_csr_no_sans(self):
        self.assertEqual(self._call_csr('csr-nosans.pem'), [])
    def test_csr_one_san(self):
        self.assertEqual(self._call_csr('csr.pem'), ['example.com'])
    def test_csr_two_sans(self):
        self.assertEqual(self._call_csr('csr-san.pem'),
                         ['example.com', 'www.example.com'])
    def test_csr_six_sans(self):
        self.assertEqual(self._call_csr('csr-6sans.pem'),
                         ['example.com', 'example.org', 'example.net',
                          'example.info', 'subdomain.example.com',
                          'other.subdomain.example.com'])
    def test_csr_hundred_sans(self):
        self.assertEqual(self._call_csr('csr-100sans.pem'),
                         ['example{0}.com'.format(i) for i in range(1, 101)])
    def test_csr_idn_sans(self):
        self.assertEqual(self._call_csr('csr-idnsans.pem'),
                         self._get_idn_names())
class RandomSnTest(unittest.TestCase):
    """Test for random certificate serial numbers."""
    def setUp(self):
        self.cert_count = 5
        self.serial_num = []
        self.key = OpenSSL.crypto.PKey()
        self.key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048)
    def test_sn_collisions(self):
        # Generating several self-signed certs should produce more than one
        # distinct serial number, i.e. serials are not a fixed constant.
        from acme.crypto_util import gen_ss_cert
        for _ in range(self.cert_count):
            cert = gen_ss_cert(self.key, ['dummy'], force_san=True)
            self.serial_num.append(cert.get_serial_number())
        self.assertTrue(len(set(self.serial_num)) > 1)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| 34.97351 | 82 | 0.629426 |
f3db1fd8893381de62d48d92c978034dc161498d | 11,675 | py | Python | lib/bx/align/score.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | 2 | 2021-01-19T00:36:14.000Z | 2021-04-28T12:09:15.000Z | lib/bx/align/score.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | lib/bx/align/score.py | tweirick/bx-python | f16a57e9f0a133ab4d62aed6fec087b8ce4ec848 | [
"MIT"
] | null | null | null | """
Support for scoring alignments using arbitrary scoring matrices, arbitrary
alphabets, and affine gap penalties.
"""
from numpy import *
class ScoringScheme( object ):
# note that gap_open and gap_extend are penalties, which means you should make them positive
def __init__( self, gap_open, gap_extend, default=-100, alphabet1="ACGT", alphabet2=None, gap1="-", gap2=None, text1_range=128, text2_range=None, typecode=int32 ):
if (text2_range == None): text2_range = text1_range
if (alphabet2 == None): alphabet2 = alphabet1
if (gap2 == None): gap2 = gap1 # (scheme with gap1=gap2=None is legit)
if type(alphabet1) == str: alphabet1 = [ch for ch in alphabet1]
if type(alphabet2) == str: alphabet2 = [ch for ch in alphabet2]
self.table = ones( (text1_range, text2_range), typecode )
self.table *= default
self.gap_open = gap_open
self.gap_extend = gap_extend
self.gap1 = gap1
self.gap2 = gap2
self.alphabet1 = alphabet1
self.alphabet2 = alphabet2
# private _set_score and _get_score allow subclasses to override them to
# implement a different underlying table object
def _set_score(self, a_b_pair,val):
(a,b) = a_b_pair
self.table[a,b] = val
def _get_score(self, a_b_pair):
(a,b) = a_b_pair
return self.table[a,b]
def set_score( self, a, b, val, foldcase1=False, foldcase2=False ):
self._set_score((a,b),val)
if foldcase1:
aCh = chr(a)
if (aCh.isupper()): aa = ord(aCh.lower())
elif (aCh.islower()): aa = ord(aCh.upper())
else: foldcase1 = False
if foldcase2:
bCh = chr(b)
if (bCh.isupper()): bb = ord(bCh.lower())
elif (bCh.islower()): bb = ord(bCh.upper())
else: foldcase2 = False
if foldcase1 and foldcase2:
self._set_score((aa,b ),val)
self._set_score((a ,bb),val)
self._set_score((aa,bb),val)
elif foldcase1:
self._set_score((aa,b ),val)
elif foldcase2:
self._set_score((a ,bb),val)
def score_alignment( self, a ):
return score_alignment(self,a)
def score_texts( self, text1, text2 ):
return score_texts( self, text1, text2 )
def __str__ (self):
isDna1 = "".join( self.alphabet1 ) == "ACGT"
isDna2 = "".join( self.alphabet2 ) == "ACGT"
labelRows = not ( isDna1 and isDna2 )
width = 3
for a in self.alphabet1:
for b in self.alphabet2:
score = self._get_score((ord(a),ord(b)))
if (type(score) == float): s = "%8.6f" % score
else: s = "%s" % score
if (len(s)+1 > width):
width = len(s)+1
lines = []
line = []
if labelRows:
if isDna1: line.append(" ")
else: line.append(" ")
for b in self.alphabet2:
if isDna2: s = b
else: s = "%02X" % ord(b)
line.append("%*s" % (width,s))
lines.append(("".join(line))+"\n")
for a in self.alphabet1:
line = []
if labelRows:
if isDna1: line.append(a)
else: line.append("%02X" % ord(a))
for b in self.alphabet2:
score = self._get_score((ord(a),ord(b)))
if (type(score) == float): s = "%8.6f" % score
else: s = "%s" % score
line.append("%*s" % (width,s))
lines.append(("".join(line))+"\n")
return "".join(lines)
def read_scoring_scheme( f, gap_open, gap_extend, gap1="-", gap2=None, **kwargs ):
    """
    Initialize scoring scheme from a file containing a blastz style text blob.
    f can be either a file (or any iterable of lines) or the name of a file.

    :raises ValueError: if the blob is not a valid scoring matrix.
    """
    close_it = False
    if type(f) == str:
        # The original used the Python-2-only `file()` builtin; `open()` is
        # equivalent and works on all Python versions.
        f = open(f, "rt")
        close_it = True
    try:
        ss = build_scoring_scheme("".join([line for line in f]), gap_open, gap_extend,
                                  gap1=gap1, gap2=gap2, **kwargs)
    finally:
        # Close the file we opened even if parsing fails.
        if close_it:
            f.close()
    return ss
def build_scoring_scheme( s, gap_open, gap_extend, gap1="-", gap2=None, **kwargs ):
    """
    Initialize scoring scheme from a blastz style text blob, first line
    specifies the bases for each row/col, subsequent lines contain the
    corresponding scores.  Slaw extensions allow for unusual and/or
    asymmetric alphabets.  Symbols can be two digit hex, and each row
    begins with symbol.  Note that a row corresponds to a symbol in text1
    and a column to a symbol in text2.

    examples:

       blastz                  slaw

        A    C    G    T       01 02  A  C  G  T
        91 -114  -31 -123      01 200 -200 -50 100 -50 100
       -114  100 -125  -31     02 -200 200 100 -50 100 -50
       -31 -125  100 -114
       -123  -31 -114   91

    :raises ValueError: if the blob is not a well-formed matrix.
    """
    # perform initial parse to determine alphabets and locate scores
    # NOTE: raising a bare string (as the original code did) has been illegal
    # since Python 2.6; use a real exception instance instead.
    bad_matrix = ValueError( "invalid scoring matrix" )
    s = s.rstrip( "\n" )
    lines = s.split( "\n" )
    rows = []
    symbols2 = lines.pop(0).split()
    symbols1 = None
    rows_have_syms = False
    a_la_blastz = True
    for i, line in enumerate( lines ):
        row_scores = line.split()
        if len( row_scores ) == len( symbols2 ): # blastz-style row
            if symbols1 == None:
                if len( lines ) != len( symbols2 ):
                    raise bad_matrix
                symbols1 = symbols2
            elif (rows_have_syms):
                raise bad_matrix
        elif len( row_scores ) == len( symbols2 ) + 1: # row starts with symbol
            if symbols1 == None:
                symbols1 = []
                rows_have_syms = True
                a_la_blastz = False
            elif not rows_have_syms:
                raise bad_matrix
            symbols1.append( row_scores.pop(0) )
        else:
            raise bad_matrix
        rows.append( row_scores )
    # convert alphabets from strings to characters
    try:
        alphabet1 = [sym_to_char( sym ) for sym in symbols1]
        alphabet2 = [sym_to_char( sym ) for sym in symbols2]
    except ValueError:
        raise bad_matrix
    if (alphabet1 != symbols1) or (alphabet2 != symbols2):
        a_la_blastz = False
    if a_la_blastz:
        alphabet1 = [ch.upper() for ch in alphabet1]
        alphabet2 = [ch.upper() for ch in alphabet2]
    # decide if rows and/or columns should reflect case
    if a_la_blastz:
        foldcase1 = foldcase2 = True
    else:
        foldcase1 = "".join( alphabet1 ) == "ACGT"
        foldcase2 = "".join( alphabet2 ) == "ACGT"
    # create appropriately sized matrix (256 wide if any symbol is non-ASCII)
    text1_range = text2_range = 128
    if ord( max( alphabet1 ) ) >= 128: text1_range = 256
    if ord( max( alphabet2 ) ) >= 128: text2_range = 256
    # use a float table if any score (or gap penalty) is a float
    typecode = int32
    for i, row_scores in enumerate( rows ):
        for j, score in enumerate( map( int_or_float, row_scores ) ):
            if type( score ) == float:
                typecode = float32
    if type( gap_open ) == float:
        typecode = float32
    if type( gap_extend ) == float:
        typecode = float32
    ss = ScoringScheme( gap_open, gap_extend, alphabet1=alphabet1, alphabet2=alphabet2, gap1=gap1, gap2=gap2, text1_range=text1_range, text2_range=text2_range, typecode=typecode, **kwargs )
    # fill matrix
    for i, row_scores in enumerate( rows ):
        for j, score in enumerate( map( int_or_float, row_scores ) ):
            ss.set_score( ord( alphabet1[i] ), ord( alphabet2[j] ), score )
            if foldcase1 and foldcase2:
                ss.set_score( ord( alphabet1[i].lower() ), ord( alphabet2[j].upper() ), score )
                ss.set_score( ord( alphabet1[i].upper() ), ord( alphabet2[j].lower() ), score )
                ss.set_score( ord( alphabet1[i].lower() ), ord( alphabet2[j].lower() ), score )
            elif foldcase1:
                ss.set_score( ord( alphabet1[i].lower() ), ord( alphabet2[j] ), score )
            elif foldcase2:
                ss.set_score( ord( alphabet1[i] ), ord( alphabet2[j].lower() ), score )
    return ss
def int_or_float( s ):
    """Parse string `s` as an int if possible, otherwise as a float.

    :raises ValueError: if `s` is neither an int nor a float literal.
    """
    try:
        return int( s )
    except ValueError:  # narrow the original bare `except:`; only fall back on parse failure
        return float( s )
# convert possible two-char symbol to a single character
def sym_to_char( sym ):
    """Map a matrix-header symbol to one character.

    A one-character symbol passes through; a two-character symbol is taken
    as a hex character code. Anything else raises ValueError.
    """
    if len( sym ) == 2:
        return chr( int( sym, base=16 ) )
    if len( sym ) == 1:
        return sym
    raise ValueError
def score_alignment( scoring_scheme, a ):
    """Sum score_texts() over every unordered pair of components in alignment `a`."""
    total = 0
    components = a.components
    n = len( components )
    for i in range( n ):
        for j in range( i + 1, n ):
            total += score_texts( scoring_scheme, components[i].text, components[j].text )
    return total
def score_texts( scoring_scheme, text1, text2 ):
    """Score two equal-length aligned rows under the scheme's affine gap model.

    Matches/mismatches are scored from the substitution table; each run of
    gaps costs gap_open once plus gap_extend per column; gap-vs-gap columns
    are ignored entirely.
    """
    total = 0
    open_gap_1 = open_gap_2 = False
    for pos, ch1 in enumerate( text1 ):
        ch2 = text2[pos]
        if ch1 == scoring_scheme.gap1 and ch2 == scoring_scheme.gap2:
            # gap aligned to gap: contributes nothing and leaves gap state untouched
            continue
        if ch1 == scoring_scheme.gap1:
            # gap in the first text
            total -= scoring_scheme.gap_extend
            if not open_gap_1:
                total -= scoring_scheme.gap_open
            open_gap_1, open_gap_2 = True, False
        elif ch2 == scoring_scheme.gap2:
            # gap in the second text
            total -= scoring_scheme.gap_extend
            if not open_gap_2:
                total -= scoring_scheme.gap_open
            open_gap_1, open_gap_2 = False, True
        else:
            # aligned pair of bases
            total += scoring_scheme._get_score( ( ord( ch1 ), ord( ch2 ) ) )
            open_gap_1 = open_gap_2 = False
    return total
def accumulate_scores( scoring_scheme, text1, text2, skip_ref_gaps=False ):
    """
    Return cumulative scores for each position in alignment as a 1d array.

    If `skip_ref_gaps` is False positions in returned array correspond to each
    column in alignment, if True they correspond to each non-gap position (each
    base) in text1.
    """
    if skip_ref_gaps:
        out = zeros( len( text1 ) - text1.count( scoring_scheme.gap1 ) )
    else:
        out = zeros( len( text1 ) )
    running = 0
    write_idx = 0
    open_gap_1 = open_gap_2 = False
    for col, ch1 in enumerate( text1 ):
        ch2 = text2[col]
        if ch1 == scoring_scheme.gap1 and ch2 == scoring_scheme.gap2:
            # gap-vs-gap columns contribute nothing and record no score
            continue
        if ch1 == scoring_scheme.gap1:
            # gap in the first text (affine penalty)
            running -= scoring_scheme.gap_extend
            if not open_gap_1:
                running -= scoring_scheme.gap_open
            open_gap_1, open_gap_2 = True, False
        elif ch2 == scoring_scheme.gap2:
            # gap in the second text
            running -= scoring_scheme.gap_extend
            if not open_gap_2:
                running -= scoring_scheme.gap_open
            open_gap_1, open_gap_2 = False, True
        else:
            # aligned pair of bases
            running += scoring_scheme._get_score( ( ord( ch1 ), ord( ch2 ) ) )
            open_gap_1 = open_gap_2 = False
        if not skip_ref_gaps or ch1 != scoring_scheme.gap1:
            out[write_idx] = running
            write_idx += 1
    return out
hox70 = build_scoring_scheme( """ A C G T
91 -114 -31 -123
-114 100 -125 -31
-31 -125 100 -114
-123 -31 -114 91 """, 400, 30 )
| 39.710884 | 189 | 0.554176 |
981e052816d53be8b67e9d9fdb80ab48ba6fc2d5 | 632 | py | Python | core.py | Serubin/Mangler | 9f53b464f86dcefc00d72346f369672f9e947119 | [
"MIT"
] | 1 | 2016-09-30T21:14:53.000Z | 2016-09-30T21:14:53.000Z | core.py | Serubin/Mangler | 9f53b464f86dcefc00d72346f369672f9e947119 | [
"MIT"
] | 1 | 2018-01-04T04:06:45.000Z | 2018-01-04T04:06:45.000Z | core.py | Serubin/Mangler | 9f53b464f86dcefc00d72346f369672f9e947119 | [
"MIT"
] | 1 | 2019-01-21T17:37:03.000Z | 2019-01-21T17:37:03.000Z | from flask import Flask
from flask_restful import Resource, Api
VERSION = "0.0.1-dev"
VERSION_HASH = ""
app = Flask(__name__)
api = Api(app)
class Index(Resource):
    """ Index Handler """

    def get(self):
        """Serve GET requests with the API status code and running version hash."""
        status = {'code': '200', 'version': getVersionHash()}
        return status
api.add_resource(HelloWorld, '/')
def APIResponse():
    # TODO: unimplemented placeholder -- presumably a shared response-builder
    # helper; confirm intended shape before use.
    pass
def getVersionHash():
    """Return the short git commit hash, caching it in module-level VERSION_HASH.

    The hash is computed at most once per process; subsequent calls return
    the cached value.
    """
    # Without `global`, the assignment below made VERSION_HASH a local name and
    # the read on the next line raised UnboundLocalError.
    global VERSION_HASH
    if VERSION_HASH == "":
        import subprocess  # local import: only needed on the first (cache-miss) call
        # check_output returns the command's stdout; subprocess.call (used
        # previously) returns only the exit status. `--format=%h` must not be
        # shell-quoted here, and "-n" / "1" are separate argv entries.
        VERSION_HASH = subprocess.check_output(
            ["git", "log", "--format=%h", "-n", "1"]).decode().strip()
    return VERSION_HASH
if __name__ == '__main__':
app.run(debug=True)
| 18.588235 | 81 | 0.618671 |
fa16296e7efb8962668df664d8de97a90be4a713 | 277 | py | Python | imdb/imdb_prediction_hook.py | iterait/cxflow-examples | e1c8e5a5e0cfe3abe92971748ac7f2c2a3673823 | [
"MIT"
] | null | null | null | imdb/imdb_prediction_hook.py | iterait/cxflow-examples | e1c8e5a5e0cfe3abe92971748ac7f2c2a3673823 | [
"MIT"
] | 3 | 2019-09-06T11:37:18.000Z | 2019-09-10T11:01:07.000Z | imdb/imdb_prediction_hook.py | iterait/emloop-examples | e1c8e5a5e0cfe3abe92971748ac7f2c2a3673823 | [
"MIT"
] | null | null | null | import emloop as el
class IMDBPredict(el.AbstractHook):
    """emloop hook that prints the sentiment predicted for each batch."""

    # Mapping from the classifier's class index to a human-readable label.
    SENTIMENTS = {0: 'negative', 1: 'positive'}

    def after_batch(self, stream_name, batch_data):
        """Print the sentiment label for the first prediction in the batch."""
        prediction = batch_data['predictions'][0]
        label = IMDBPredict.SENTIMENTS[prediction]
        print('Predicted sentiment: {}'.format(label))
        print()
| 27.7 | 101 | 0.685921 |
704db6abc3a7e48adac659ac55d7a7027c0113f6 | 2,012 | py | Python | test/lazy/test_matmul_lazy_tensor.py | beyucel/gpytorch | a5394937495756945b831d83035349579d8fac31 | [
"MIT"
] | 2 | 2019-04-19T00:35:49.000Z | 2019-04-19T02:51:49.000Z | test/lazy/test_matmul_lazy_tensor.py | beyucel/gpytorch | a5394937495756945b831d83035349579d8fac31 | [
"MIT"
] | null | null | null | test/lazy/test_matmul_lazy_tensor.py | beyucel/gpytorch | a5394937495756945b831d83035349579d8fac31 | [
"MIT"
] | 1 | 2019-05-10T17:52:39.000Z | 2019-05-10T17:52:39.000Z | #!/usr/bin/env python3
import torch
import unittest
from gpytorch.lazy import MatmulLazyTensor
from test.lazy._lazy_tensor_test_case import LazyTensorTestCase, RectangularLazyTensorTestCase
class TestMatmulLazyTensor(LazyTensorTestCase, unittest.TestCase):
    """Square (5x5) MatmulLazyTensor built as L @ L.T from one 5x6 factor."""
    seed = 1
    def create_lazy_tensor(self):
        # rhs is a detached copy of lhs^T, so the product is symmetric.
        lhs = torch.randn(5, 6, requires_grad=True)
        rhs = lhs.clone().detach().transpose(-1, -2)
        covar = MatmulLazyTensor(lhs, rhs)
        return covar
    def evaluate_lazy_tensor(self, lazy_tensor):
        # Dense reference: explicit matmul of the two wrapped tensors.
        return lazy_tensor.left_lazy_tensor.tensor.matmul(lazy_tensor.right_lazy_tensor.tensor)
class TestMatmulLazyTensorBatch(LazyTensorTestCase, unittest.TestCase):
    """Batched (5 x 5x5) MatmulLazyTensor built as L @ L.T per batch element."""
    seed = 3
    def create_lazy_tensor(self):
        # Leading dim of 5 is the batch dimension; rhs mirrors lhs transposed.
        lhs = torch.randn(5, 5, 6, requires_grad=True)
        rhs = lhs.clone().detach().transpose(-1, -2)
        covar = MatmulLazyTensor(lhs, rhs)
        return covar
    def evaluate_lazy_tensor(self, lazy_tensor):
        # Dense reference: explicit matmul of the two wrapped tensors.
        return lazy_tensor.left_lazy_tensor.tensor.matmul(lazy_tensor.right_lazy_tensor.tensor)
class TestMatmulLazyTensorRectangular(RectangularLazyTensorTestCase, unittest.TestCase):
    """Rectangular (5x6) MatmulLazyTensor from independent 5x3 and 3x6 factors."""
    def create_lazy_tensor(self):
        lhs = torch.randn(5, 3, requires_grad=True)
        rhs = torch.randn(3, 6, requires_grad=True)
        covar = MatmulLazyTensor(lhs, rhs)
        return covar
    def evaluate_lazy_tensor(self, lazy_tensor):
        # Dense reference: explicit matmul of the two wrapped tensors.
        return lazy_tensor.left_lazy_tensor.tensor.matmul(lazy_tensor.right_lazy_tensor.tensor)
class TestMatmulLazyTensorRectangularMultiBatch(RectangularLazyTensorTestCase, unittest.TestCase):
    """Rectangular MatmulLazyTensor with two batch dims (2x3 batches of 5x6)."""
    def create_lazy_tensor(self):
        lhs = torch.randn(2, 3, 5, 3, requires_grad=True)
        rhs = torch.randn(2, 3, 3, 6, requires_grad=True)
        covar = MatmulLazyTensor(lhs, rhs)
        return covar
    def evaluate_lazy_tensor(self, lazy_tensor):
        # Dense reference: explicit matmul of the two wrapped tensors.
        return lazy_tensor.left_lazy_tensor.tensor.matmul(lazy_tensor.right_lazy_tensor.tensor)
if __name__ == "__main__":
unittest.main()
| 34.101695 | 98 | 0.732604 |
5591dda0a88ab308eda740c775a0b62f83be49ff | 127 | py | Python | clusterking/maths/__init__.py | celis/B_decays_clustering- | d73739edaf7fa82a0041d9567d27cd490232b75c | [
"MIT"
] | 9 | 2019-03-27T12:32:24.000Z | 2021-10-19T10:18:33.000Z | clusterking/maths/__init__.py | celis/B_decays_clustering- | d73739edaf7fa82a0041d9567d27cd490232b75c | [
"MIT"
] | 30 | 2019-03-26T18:32:39.000Z | 2019-09-24T06:54:28.000Z | clusterking/maths/__init__.py | celis/B_decays_clustering- | d73739edaf7fa82a0041d9567d27cd490232b75c | [
"MIT"
] | 2 | 2019-08-08T09:42:38.000Z | 2019-12-29T22:47:05.000Z | #!/usr/bin/env python3
from clusterking.maths.binning import bin_function
# from bclustering.maths.metric import chi2_metric
| 21.166667 | 50 | 0.818898 |
02dcb0f69722cb6943fda4ff27002bce9ab49b85 | 21,441 | py | Python | SiO2/SiO2_fun.py | bcaplins/NIST_APT_TOOLS | 80c25498e8b069b8ee289a2d09c76c932c054cea | [
"Unlicense"
] | null | null | null | SiO2/SiO2_fun.py | bcaplins/NIST_APT_TOOLS | 80c25498e8b069b8ee289a2d09c76c932c054cea | [
"Unlicense"
] | null | null | null | SiO2/SiO2_fun.py | bcaplins/NIST_APT_TOOLS | 80c25498e8b069b8ee289a2d09c76c932c054cea | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed May 6 12:58:24 2020
@author: capli
"""
import colorcet as cc
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
def extents(f):
    """Return [lo, hi] outer edges for `imshow(extent=...)` given uniformly
    spaced bin centers `f` (pads half a bin step on each side)."""
    half_step = (f[1] - f[0]) / 2
    return [f[0] - half_step, f[-1] + half_step]
def create_histogram(xs,ys,x_roi=None,y_roi=None,num_x=128,num_y=128):
    """2D count histogram of (xs, ys) with num_x * num_y bins over the ROIs.

    Returns (counts, x_edges, y_edges) exactly as numpy.histogram2d does.
    """
    counts, x_edges, y_edges = np.histogram2d(
        xs, ys,
        bins=[num_x, num_y],
        range=[x_roi, y_roi],
        density=False)
    return (counts, x_edges, y_edges)
# standard imports
def create_det_hit_plots_SI(epos, pk_data, pk_params, fig_idx=200):
    """Two-panel detector hit-map figure in figure `fig_idx`: an all-ion
    histogram and a Ga 2+/1+ ratio map (log10) -- presumably a charge-state
    ratio; confirm against the analysis workflow."""
    def create_axes(ax, sel_idxs,title):
        # 64x64 detector-coordinate histogram of the selected events.
        NNN = 64
        N,x_edges,y_edges = create_histogram(epos['x_det'][sel_idxs],epos['y_det'][sel_idxs],
                                             x_roi=[-35,35],y_roi=[-35,35],
                                             num_x=NNN, num_y=NNN)
        ax.imshow(np.transpose(N), aspect='auto',
                  extent=extents(x_edges) + extents(y_edges), origin='lower', cmap=cc.cm.CET_L8,
                  interpolation='nearest')
        ax.set_aspect('equal', 'box')
        ax.set(xlabel='$x_{detector}$ (mm)')
        ax.set(ylabel='$y_{detector}$ (mm)')
        ax.set_title(title)
        return
    def create_axes_v2(ax, sel_idxs1,sel_idxs2,title):
        # Per-pixel log10 ratio of two event selections (e.g. Ga2+ / Ga1+).
        # NOTE(review): pixels where N1 is 0 produce divide-by-zero / log10
        # warnings here.
        NNN = 64
        N1,x_edges,y_edges = create_histogram(epos['x_det'][sel_idxs1],epos['y_det'][sel_idxs1],
                                              x_roi=[-35,35],y_roi=[-35,35],
                                              num_x=NNN, num_y=NNN)
        N2,x_edges,y_edges = create_histogram(epos['x_det'][sel_idxs2],epos['y_det'][sel_idxs2],
                                              x_roi=[-35,35],y_roi=[-35,35],
                                              num_x=NNN, num_y=NNN)
        ax.imshow(np.transpose(np.log10(N2/N1)), aspect='auto',
                  extent=extents(x_edges) + extents(y_edges), origin='lower', cmap=cc.cm.CET_L8,
                  interpolation='nearest')
        ax.set_aspect('equal', 'box')
        ax.set(xlabel='$x_{detector}$ (mm)')
        ax.set(ylabel='$y_{detector}$ (mm)')
        ax.set_title(title)
        return
    fig = plt.figure(num=fig_idx)
    fig.clear()
    fig, axes = plt.subplots(nrows=1, ncols=2, sharex=True, sharey=True, num=fig_idx)
    axes = axes.flatten()
    ax_idx = 0
    # Get all events
    m2q_roi = [6, 120]
    sel_idxs = np.where((epos['m2q']>m2q_roi[0]) & (epos['m2q']<m2q_roi[1]))
    create_axes(axes[ax_idx],sel_idxs,'ion histogram m/z $\in$'+m2q_roi.__str__())
    ax_idx += 1
    # CSR: select Ga+ (m/z 68.92 / 70.92 isotopes) and Ga2+ (half those
    # values) peaks within a +/- HW Da window each.
    HW = 0.3
    Ga1p = np.array([68.92, 70.92])
    Ga2p = Ga1p/2
    sel_idxs1 = np.where(((epos['m2q']>(Ga1p[0]-HW)) & (epos['m2q']<(Ga1p[0]+HW))) \
                         | ((epos['m2q']>(Ga1p[1]-HW)) & (epos['m2q']<(Ga1p[1]+HW))))
    sel_idxs2 = np.where(((epos['m2q']>(Ga2p[0]-HW)) & (epos['m2q']<(Ga2p[0]+HW))) \
                         | ((epos['m2q']>(Ga2p[1]-HW)) & (epos['m2q']<(Ga2p[1]+HW))))
    create_axes_v2(axes[ax_idx],sel_idxs1,sel_idxs2,'CSR')
    return None
def create_det_hit_plots(epos, pk_data, pk_params, fig_idx=200):
    """Row of detector hit maps in figure `fig_idx`: all ions, hydrogen,
    background, then one panel per species column in `pk_data` (every
    column except 'm2q')."""
    def create_axes(ax, sel_idxs,title):
        # Detector-coordinate histogram of the selected events (default 128x128 bins).
        N,x_edges,y_edges = create_histogram(epos['x_det'][sel_idxs],epos['y_det'][sel_idxs],
                                             x_roi=[-35,35],y_roi=[-35,35])
        ax.imshow(np.transpose(N), aspect='auto',
                  extent=extents(x_edges) + extents(y_edges), origin='lower', cmap=cc.cm.CET_L8,
                  interpolation='nearest')
        ax.set_aspect('equal', 'box')
        ax.set(xlabel='det_x')
        ax.set(ylabel='det_y')
        ax.set_title(title)
        return
    # Species columns: every named field except the m/z column itself.
    keys = list(pk_data.dtype.fields.keys())
    keys.remove('m2q')
    fig = plt.figure(num=fig_idx)
    fig.clear()
    fig, axes = plt.subplots(nrows=1, ncols=len(keys)+3, sharex=True, sharey=True, num=fig_idx)
    axes = axes.flatten()
    ax_idx = 0
    # Get all events
    m2q_roi = [6, 120]
    sel_idxs = np.where((epos['m2q']>m2q_roi[0]) & (epos['m2q']<m2q_roi[1]))
    create_axes(axes[ax_idx],sel_idxs,'roi='+m2q_roi.__str__())
    ax_idx += 1
    # Get hydrogen events
    m2q_roi = [0.9, 1.2]
    sel_idxs = np.where((epos['m2q']>m2q_roi[0]) & (epos['m2q']<m2q_roi[1]))
    create_axes(axes[ax_idx],sel_idxs,'roi='+m2q_roi.__str__())
    ax_idx += 1
    # Get bg events
    m2q_roi = [120, 200]
    sel_idxs = np.where((epos['m2q']>m2q_roi[0]) & (epos['m2q']<m2q_roi[1]))
    create_axes(axes[ax_idx],sel_idxs,'roi='+m2q_roi.__str__())
    ax_idx += 1
    for k in keys:
        # Union of events inside every ranged peak that contains species k.
        k_pks = np.where(pk_data[k]>0)[0]
        sel_idxs = np.zeros(0,dtype='int64')
        for pk in k_pks:
            ev_idxs = np.where((epos['m2q']>pk_params['pre_rng'][pk]) & (epos['m2q']<pk_params['post_rng'][pk]))[0]
            sel_idxs = np.concatenate((sel_idxs,ev_idxs))
        create_axes(axes[ax_idx], sel_idxs, k)
        ax_idx +=1
    return None
def create_csr_2d_plots(epos, pk_params, Ga1p_idxs, Ga2p_idxs, fig_idx=500):
    """Plot a smoothed 2D map of the Ga2+/Ga1+ count ratio ('CSR') over the
    detector, using the ranged peaks given by `Ga1p_idxs` / `Ga2p_idxs`.

    Returns the matplotlib figure.
    """
    def get_events(pk_idxs):
        # Boolean-OR the [pre_rng, post_rng] m/z windows of the given peaks
        # and return the matching events.
        is_part = (epos['m2q']<0)
        for pk_idx in pk_idxs:
            is_part = is_part | ((epos['m2q'] >= pk_params['pre_rng'][pk_idx]) & (epos['m2q'] <= pk_params['post_rng'][pk_idx]))
        return epos[is_part]
    Ga1p_sub_epos = get_events(Ga1p_idxs)
    Ga2p_sub_epos = get_events(Ga2p_idxs)
    # Coarse 32x32 binning so each pixel has enough counts for a ratio.
    num_disc = 32
    N1,x_edges,y_edges = create_histogram(Ga1p_sub_epos['x_det'],Ga1p_sub_epos['y_det'],
                                          x_roi=[-35,35],y_roi=[-35,35],
                                          num_x=num_disc, num_y=num_disc)
    N2,x_edges,y_edges = create_histogram(Ga2p_sub_epos['x_det'],Ga2p_sub_epos['y_det'],
                                          x_roi=[-35,35],y_roi=[-35,35],
                                          num_x=num_disc, num_y=num_disc)
    from scipy.ndimage import gaussian_filter
    # EPS guards against division by zero in empty denominator pixels.
    EPS = 1e-6
    CSR = (gaussian_filter(N2, sigma=1)/gaussian_filter(N1+EPS, sigma=1))
    # Pixels with no counts in either map are clamped to the minimum valid ratio.
    bad_dat = ~((N2>0) & (N1>0))
    CSR[bad_dat] = CSR[~bad_dat].min()
    fig = plt.figure(num=fig_idx)
    fig.clear()
    ax = fig.gca()
    # Color limits at the 1st/99th percentile suppress outlier pixels.
    ax.imshow(np.transpose(CSR), aspect='auto',
              extent=extents(x_edges) + extents(y_edges), origin='lower',
              vmin=np.percentile(CSR.flatten(),1), vmax=np.percentile(CSR.flatten(),99),
              cmap=cc.cm.CET_L8,
              interpolation='nearest')
    ax.set_aspect('equal', 'box')
    ax.set(xlabel='det_x')
    ax.set(ylabel='det_y')
    ax.set_title('CSR')
    return fig
def mean_shift(xs, ys, radius=5):
    """Mean-shift mode finding in 2D: iteratively move a probe point to the
    centroid of the samples inside a circular window around it.

    xs, ys : 1D arrays of sample coordinates.
    radius : window radius (new optional parameter; default keeps the old
        hard-coded value of 5).
    Returns (xi, yi): arrays tracing the probe trajectory (may be empty if
    the window is empty at the start).
    """
    x_curr = 0
    y_curr = 0
    N_LOOPS = 64
    xi = np.zeros(N_LOOPS)
    yi = np.zeros(N_LOOPS)
    for i in np.arange(N_LOOPS):
        x_prev = x_curr
        y_prev = y_curr
        idxs = np.where(((xs-x_curr)**2+(ys-y_curr)**2) <= radius**2)[0]
        if idxs.size == 0:
            # No samples inside the window: stop rather than taking the
            # mean of an empty slice (which yields NaN and a warning).
            break
        x_q = np.mean(xs[idxs])
        y_q = np.mean(ys[idxs])
        # Mean-shift update: move TO the windowed mean.  The original code
        # computed x_curr = x_prev - (x_q - x_prev) = 2*x_prev - x_q, i.e.
        # it reflected the probe AWAY from the local density peak, so the
        # iteration diverged instead of converging.
        x_curr = x_q
        y_curr = y_q
        if np.sqrt((x_curr-x_prev)**2 + (y_curr-y_prev)**2) < (radius*1e-2):
            # converged: displacement below 1% of the window radius
            break
        xi[i] = x_curr
        yi[i] = y_curr
    return (xi[:i], yi[:i])
def chop_data_rad_and_time(epos, c_pt, time_chunk_size=2**16, N_ann_chunks=3):
    """Partition events into (time chunk) x (detector annulus) cells.

    Time chunks are equal-count slices of the event sequence; annuli are
    equal-area rings (in r^2) centered on c_pt and bounded by the largest
    radius that still fits on the detector.

    Returns (time_chunk_centers, r_centers, idxs_list) where
    idxs_list[t][a] holds the event indices in time chunk t and annulus a.
    """
    def midpoints(edges):
        # centers of consecutive edge pairs
        return (edges[:-1] + edges[1:]) / 2.0

    # Equal-count time chunks; recompute the per-chunk size so the chunks
    # tile the whole dataset exactly.
    n_time = int(epos.size // time_chunk_size)
    events_per_chunk = epos.size // n_time
    t_edges = events_per_chunk * np.arange(n_time + 1)
    t_centers = midpoints(t_edges)

    # Equal-area annuli out to the detector edge as seen from c_pt.
    det_radius = 28
    center_offset = np.sqrt(c_pt[0]**2 + c_pt[1]**2)
    r_max = det_radius - center_offset
    r_edges = np.sqrt(np.linspace(0, r_max**2, N_ann_chunks + 1))
    r_centers = midpoints(r_edges)

    # Radial distance of every event from the chosen center point.
    r = np.sqrt(np.square(epos['x_det'] - c_pt[0]) + np.square(epos['y_det'] - c_pt[1]))

    idxs_list = []
    for t_idx in range(n_time):
        in_chunk = np.arange(t_edges[t_idx], t_edges[t_idx + 1])
        row = []
        for a_idx in range(N_ann_chunks):
            in_ring = np.where((r > r_edges[a_idx]) & (r <= r_edges[a_idx + 1]))[0]
            row.append(np.intersect1d(in_ring, in_chunk))
        idxs_list.append(row)
    return (t_centers, r_centers, idxs_list)
def chop_data_z(epos, chunk_edges_nm=None, chunk_size=2**13):
    """Partition events into depth (z) chunks.

    chunk_edges_nm : optional explicit bin edges in nm.  When None, edges
        are spread uniformly between the 5th and 95th z-percentiles so that
        each chunk holds roughly chunk_size events; events outside that
        percentile band are dropped.
    Returns (chunk_centers, idxs_list) with one index array per chunk.
    """
    es2cs = lambda es: (es[:-1] + es[1:]) / 2.0
    if chunk_edges_nm is None:
        # Trim the sparse tails of the depth distribution.
        lims = np.percentile(epos['z'], [5, 95])
        N_chunks = int(np.floor(epos.size / chunk_size))
        # N chunks require N+1 edges.  The original passed num=N_chunks,
        # silently producing N_chunks-1 chunks (off-by-one).
        chunk_edges_nm = np.linspace(lims[0], lims[1], num=N_chunks + 1)
    chunk_centers = es2cs(chunk_edges_nm)
    idxs_list = []
    for z_idx in np.arange(chunk_centers.size):
        # half-open bins: lower edge exclusive, upper edge inclusive
        idxs = np.where((epos['z'] > chunk_edges_nm[z_idx]) & (epos['z'] <= chunk_edges_nm[z_idx + 1]))[0]
        idxs_list.append(idxs)
    return (chunk_centers, idxs_list)
def heatmap(data, row_labels, col_labels, ax=None,
            cbar_kw=None, cbarlabel="", **kwargs):
    """
    Create a heatmap from a numpy array and two lists of labels.
    Parameters
    ----------
    data
        A 2D numpy array of shape (N, M).
    row_labels
        A list or array of length N with the labels for the rows.
    col_labels
        A list or array of length M with the labels for the columns.
    ax
        A `matplotlib.axes.Axes` instance to which the heatmap is plotted. If
        not provided, use current axes or create a new one. Optional.
    cbar_kw
        A dictionary with arguments to `matplotlib.Figure.colorbar`. Optional.
    cbarlabel
        The label for the colorbar. Optional.
    **kwargs
        All other arguments are forwarded to `imshow`.
    """
    # Avoid a mutable default argument; the original used cbar_kw={}, which
    # is shared between calls and could be mutated by a caller.
    if cbar_kw is None:
        cbar_kw = {}
    if not ax:
        ax = plt.gca()
    # Plot the heatmap
    im = ax.imshow(data, **kwargs)
    # Create colorbar
    cbar = ax.figure.colorbar(im, ax=ax, **cbar_kw)
    cbar.ax.set_ylabel(cbarlabel, rotation=-90, va="bottom")
    # We want to show all ticks...
    ax.set_xticks(np.arange(data.shape[1]))
    ax.set_yticks(np.arange(data.shape[0]))
    # ... and label them with the respective list entries.
    ax.set_xticklabels(col_labels)
    ax.set_yticklabels(row_labels)
    # Let the horizontal axes labeling appear on top.
    ax.tick_params(top=True, bottom=False,
                   labeltop=True, labelbottom=False)
    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=-30, ha="right",
             rotation_mode="anchor")
    # Turn spines off and create white grid.
    for edge, spine in ax.spines.items():
        spine.set_visible(False)
    ax.set_xticks(np.arange(data.shape[1]+1)-.5, minor=True)
    ax.set_yticks(np.arange(data.shape[0]+1)-.5, minor=True)
    ax.grid(which="minor", color="w", linestyle='-', linewidth=3)
    ax.tick_params(which="minor", bottom=False, left=False)
    return im, cbar
def annotate_heatmap(im, data=None, valfmt="{x:.2f}",
                     textcolors=("black", "white"),
                     threshold=None, **textkw):
    """
    A function to annotate a heatmap.
    Parameters
    ----------
    im
        The AxesImage to be labeled.
    data
        Data used to annotate. If None, the image's data is used. Optional.
    valfmt
        The format of the annotations inside the heatmap. This should either
        use the string format method, e.g. "$ {x:.2f}", or be a
        `matplotlib.ticker.Formatter`. Optional.
    textcolors
        A pair of two color specifications. The first is used for
        values below a threshold, the second for those above. Optional.
    threshold
        Value in data units according to which the colors from textcolors are
        applied. If None (the default) uses the middle of the colormap as
        separation. Optional.
    **textkw
        All other arguments are forwarded to each call to `text` used to create
        the text labels.
    """
    # Default is now an (immutable) tuple -- the original used a list
    # default, which is a shared mutable default argument.
    if not isinstance(data, (list, np.ndarray)):
        data = im.get_array()
    # Normalize the threshold to the images color range.
    if threshold is not None:
        threshold = im.norm(threshold)
    else:
        threshold = im.norm(data.max())/2.
    # Set default alignment to center, but allow it to be
    # overwritten by textkw.
    kw = dict(horizontalalignment="center",
              verticalalignment="center")
    kw.update(textkw)
    # Get the formatter in case a string is supplied
    if isinstance(valfmt, str):
        valfmt = matplotlib.ticker.StrMethodFormatter(valfmt)
    # Loop over the data and create a `Text` for each "pixel".
    # Change the text's color depending on the data.
    texts = []
    for i in range(data.shape[0]):
        for j in range(data.shape[1]):
            kw.update(color=textcolors[int(im.norm(data[i, j]) > threshold)])
            text = im.axes.text(j, i, valfmt(data[i, j], None), **kw)
            texts.append(text)
    return texts
# Fit plane to QW and rotate data to 'flatten' wrt z-axis
def qw_plane_fit(x, y, z, p_guess, fwhm=3):
    """Robustly fit the plane z = p[0]*x + p[1]*y + p[2] to a point cloud.

    Alternates between selecting points within fwhm/2 of the current plane
    and least-squares refitting on just those points, up to 64 rounds or
    until the parameters stop changing.  Returns the fitted parameters.
    """
    import scipy.linalg
    # three parameters alpha*x+beta*y+gamma
    previous = -1
    fitted = p_guess
    for iteration in np.arange(64):
        previous = fitted
        # residuals against the current plane estimate
        residual = np.abs(z - (fitted[0]*x + fitted[1]*y + fitted[2]))
        keep = np.where(residual < (fwhm/2.0))[0]
        # least squares on the inliers only
        design = np.c_[x[keep], y[keep], np.ones(x[keep].shape)]
        fitted, _, _, _ = scipy.linalg.lstsq(design, z[keep])
        print(fitted)
        if np.sum(np.square(fitted - previous)) < 1e-12:
            print('break early. idx: ', iteration)
            break
    return fitted
def rotate_data_flat(p, x, y, z):
    """Rotate a point cloud so the plane z = p[0]*x + p[1]*y + p[2] becomes
    level (parallel to the x-y plane).

    The rotation is built as inv(Rz) @ Rx(tilt) @ Rz: spin the plane normal
    into the y-z plane, tilt it onto the z axis, then undo the spin.
    Returns the rotated coordinates as a tuple (x', y', z').
    """
    # Unit normal of the plane (pointing toward +z).
    normal = -np.array([p[0], p[1], -1])
    normal = normal / np.sqrt(np.sum(normal**2))
    z_hat = np.array([0, 0, 1])
    # Angle between the normal and the z axis (negated: we rotate back).
    tilt = -np.arccos(np.sum(z_hat * normal))
    # Azimuth that brings the normal's in-plane component onto -y.
    spin = np.arctan2(p[0], p[1])

    cz, sz = (np.cos(spin), np.sin(spin))
    Rz = np.array([[cz, -sz, 0], [sz, cz, 0], [0, 0, 1]])
    ct, st = (np.cos(tilt), np.sin(tilt))
    Rx = np.array([[1, 0, 0], [0, ct, -st], [0, st, ct]])

    pts = np.c_[x, y, z].T
    pts = np.linalg.inv(Rz) @ (Rx @ (Rz @ pts))
    return (pts[0, :], pts[1, :], pts[2, :])
def set_axes_equal(ax):
    '''Force equal scaling on a 3D matplotlib axis so spheres look like
    spheres and cubes like cubes.  Works around Matplotlib's 3D axes not
    honoring ax.set_aspect('equal') / ax.axis('equal').

    Input
      ax: a matplotlib 3D axis, e.g. as returned by plt.gca().
    '''
    # Current limits as a (3, 2) array: rows are x, y, z.
    limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()])
    middles = np.mean(limits, axis=1)
    spans = np.abs(limits[:, 1] - limits[:, 0])
    # Half of the largest span: the bounding "sphere" radius in the
    # infinity norm.
    half_span = 0.5 * max(spans)
    ax.set_xlim3d([middles[0] - half_span, middles[0] + half_span])
    ax.set_ylim3d([middles[1] - half_span, middles[1] + half_span])
    ax.set_zlim3d([middles[2] - half_span, middles[2] + half_span])
import plotting_stuff
def load_epos(run_number, epos_trim, fig_idx=-1):
    """Load a voltage-and-bowl-corrected .epos file and trim the event list.

    run_number : e.g. "R20_18162"; the file is read from
        "GaN epos files/<run_number>-v01_vbm_corr.epos".
    epos_trim : (n_start, n_end) -- events to drop from the beginning and
        end of the dataset.
    fig_idx : if > 0, plot m2q vs. event index with the selected ROI marked.
    Returns the trimmed structured event array.
    """
    import GaN_data_paths
    import apt_fileio
    # fn = GaN_data_paths.get_epos_path(run_number=run_number)
    # fn = fn.rsplit(sep='.', maxsplit=1)[0]+'_vbm_corr.epos'
    # fn = r"GaN epos files\R20_18162-v01_vbm_corr.epos"
    # Forward slash instead of the original hard-coded backslash: Windows
    # accepts both, and the path now also works on POSIX systems.
    fn = "GaN epos files/" + run_number + "-v01_vbm_corr.epos"
    epos = apt_fileio.read_epos_numpy(fn)
    # Plot m2q vs event index and show the current ROI selection
    roi_event_idxs = np.arange(epos_trim[0], epos.size - epos_trim[1])
    if fig_idx > 0:
        ax = plotting_stuff.plot_m2q_vs_time(epos['m2q'], epos, fig_idx=fig_idx)
        # dashed vertical lines at the ROI boundaries
        ax.plot(roi_event_idxs[0]*np.ones(2), [0, 1200], '--k')
        ax.plot(roi_event_idxs[-1]*np.ones(2), [0, 1200], '--k')
        ax.set_title('roi selected to start analysis')
    sub_epos = epos[roi_event_idxs]
    print('ROI includes the following % of events: ', sub_epos.size/epos.size)
    return sub_epos
def fit_spectrum(epos, pk_data, peak_height_fraction, bg_rois):
    """Range the peaks of the m2q spectrum and locate the Ga peak entries.

    epos : structured event array with an 'm2q' field.
    pk_data : peak table with an 'm2q' field of nominal peak positions.
    peak_height_fraction : fraction of peak height used to set range limits.
    bg_rois : list of [lo, hi] m2q windows used for the global background fit.
    Returns (pk_params, glob_bg_param, Ga1p_idxs, Ga2p_idxs).
    """
    import initElements_P3
    import peak_param_determination as ppd
    # Define peaks to range
    ed = initElements_P3.initElements()
    # Define which peaks to use for CSR calcs:
    # Ga+ at the 69/71 isotope masses, Ga++ at half those m/z values.
    Ga1p_m2qs = [ed['Ga'].isotopes[69][0], ed['Ga'].isotopes[71][0]]
    Ga2p_m2qs = [ed['Ga'].isotopes[69][0]/2, ed['Ga'].isotopes[71][0]/2]
    # Indices of the nearest entries in the user-supplied peak list.
    Ga1p_idxs = [np.argmin(np.abs(m2q-pk_data['m2q'])) for m2q in Ga1p_m2qs]
    Ga2p_idxs = [np.argmin(np.abs(m2q-pk_data['m2q'])) for m2q in Ga2p_m2qs]
    # Determine the global background
    glob_bg_param = ppd.get_glob_bg(epos['m2q'],rois=bg_rois)
    # Range the peaks
    # NOTE(review): the freshly computed glob_bg_param is returned to the
    # caller but NOT used here -- ranging is done with glob_bg_param=0
    # (i.e. no background subtraction).  Looks intentional, but confirm.
    pk_params = ppd.get_peak_ranges(epos,
                                    pk_data['m2q'],
                                    peak_height_fraction=peak_height_fraction,
                                    glob_bg_param=0)
    return (pk_params, glob_bg_param, Ga1p_idxs, Ga2p_idxs)
def count_and_get_compositions(epos, pk_data, pk_params, glob_bg_param, bg_frac=1, noise_threshhold=2):
    """Count ranged peaks, zero out peaks that fail a signal/noise test, and
    convert the surviving counts to compositions.

    bg_frac : multiplier applied to both background estimates before the
        S/N test (1 = use the fitted backgrounds as-is).
    noise_threshhold : peaks must satisfy S > threshold*sqrt(2B) to survive.
    Returns (cts, compositions, is_peak).
    """
    import peak_param_determination as ppd
    # Count the peaks, local bg, and global bg
    cts = ppd.do_counting(epos,pk_params,glob_bg_param)
    cts['local_bg'] = cts['local_bg']*bg_frac
    cts['global_bg'] = cts['global_bg']*bg_frac
    # Test for peak S/N and throw out craptastic peaks.
    # B: the more conservative (larger) of the two background estimates,
    # per peak; T: total counts in the range; S: background-subtracted signal.
    B = np.max(np.c_[cts['local_bg'][:,None],cts['global_bg'][:,None]],1)[:,None]
    T = cts['total'][:,None]
    S = T-B
    std_S = np.sqrt(T+B)  # NOTE(review): computed but never used below
    # Make up a threshold for peak detection... for the most part this won't matter
    # since weak peaks don't contribute to stoichiometry much... except for Mg!
    is_peak = S>(noise_threshhold*np.sqrt(2*B))
    # Zero every field of any peak row that failed the S/N test.
    # (presumably cts is a structured array: `ct` is one per-peak record and
    #  ct[i] addresses its i-th field -- confirm against ppd.do_counting)
    for idx, ct in enumerate(cts):
        if not is_peak[idx]:
            for i in np.arange(len(ct)):
                ct[i] = 0
    # Calculate compositions
    compositions = ppd.do_composition(pk_data,cts)
    return (cts, compositions, is_peak)
def bin_and_smooth_spectrum(epos, user_roi, bin_wid_mDa=30, smooth_wid_mDa=-1):
    """Histogram the m2q spectrum and optionally box-smooth it.

    bin_wid_mDa : histogram bin width in milli-Daltons.
    smooth_wid_mDa : smoothing window width in milli-Daltons; <= 0 disables
        smoothing.
    Returns (xs, ys_sm): bin centers and (optionally smoothed) counts.
    """
    from histogram_functions import bin_dat
    import peak_param_determination as ppd
    xs, ys = bin_dat(epos['m2q'], user_roi=user_roi, isBinAligned=True,
                     bin_width=bin_wid_mDa/1000.0)
    if smooth_wid_mDa > 0:
        # Convert the requested smoothing width from mDa into a whole number
        # of histogram bins.  The original referenced an undefined name
        # `smooth_wid` here, raising NameError whenever smoothing was
        # requested.  (Assumes the intended window is smooth_wid_mDa worth
        # of bins -- TODO confirm against callers.)
        smooth_wid = int(round(smooth_wid_mDa / bin_wid_mDa))
        # moving_average * window size == moving sum, keeping total counts
        # on the same scale as the raw histogram.
        ys_sm = ppd.moving_average(ys, smooth_wid) * smooth_wid
    else:
        ys_sm = ys
    return (xs, ys_sm)
def plot_spectrum_gory_detail(epos, user_roi, pk_params, bg_rois, glob_bg_param, is_peak, fig_idx):
    """Diagnostic spectrum plot: smoothed histogram, global background, and
    every peak's ranging limits.

    Accepted peaks (is_peak True) are drawn with black dashes at the signal
    range edges, magenta dashes at the background range edges, and a green
    dashed local-background level; rejected peaks get a single red dashed
    line at their center.  The background ROIs used for the global fit are
    shaded blue.  Returns None.
    """
    from histogram_functions import bin_dat
    import peak_param_determination as ppd
    xs, ys = bin_dat(epos['m2q'],user_roi=user_roi,isBinAligned=True)
    #ys_sm = ppd.do_smooth_with_gaussian(ys,30)
    ys_sm = ppd.moving_average(ys,30)
    glob_bg = ppd.physics_bg(xs,glob_bg_param)
    fig = plt.figure(num=fig_idx)
    fig.clear()
    ax = fig.gca()
    ax.plot(xs,ys_sm,label='hist')
    ax.plot(xs,glob_bg,label='global bg')
    ax.set(xlabel='m/z (Da)', ylabel='counts')
    ax.grid()
    fig.tight_layout()
    fig.canvas.manager.window.raise_()
    ax.set_yscale('log')
    ax.legend()
    for idx,pk_param in enumerate(pk_params):
        if is_peak[idx]:
            # vertical dashed lines from 0.5 up to the peak top at each edge
            ax.plot(np.array([1,1])*pk_param['pre_rng'] ,np.array([0.5,(pk_param['amp']+pk_param['off'])]),'k--')
            ax.plot(np.array([1,1])*pk_param['post_rng'] ,np.array([0.5,(pk_param['amp']+pk_param['off'])]),'k--')
            ax.plot(np.array([1,1])*pk_param['pre_bg_rng'] ,np.array([0.5,(pk_param['amp']+pk_param['off'])]),'m--')
            ax.plot(np.array([1,1])*pk_param['post_bg_rng'] ,np.array([0.5,(pk_param['amp']+pk_param['off'])]),'m--')
            # local background level across the background window
            ax.plot(np.array([pk_param['pre_bg_rng'],pk_param['post_bg_rng']]) ,np.ones(2)*pk_param['loc_bg'],'g--')
        else:
            ax.plot(np.array([1,1])*pk_param['x0_mean_shift'] ,np.array([0.5,(pk_param['amp']+pk_param['off'])]),'r--')
    for roi in bg_rois:
        # shade each background ROI as a translucent blue box
        xbox = np.array([roi[0],roi[0],roi[1],roi[1]])
        ybox = np.array([0.1,np.max(ys_sm)/10,np.max(ys_sm)/10,0.1])
        ax.fill(xbox,ybox, 'b', alpha=0.2)
    plt.pause(0.1)
    return None
| 31.764444 | 128 | 0.581409 |
a9be3bded6bf4922dde9497c0d7aab0797d7c949 | 111 | py | Python | src/gunicorn.conf.py | gixproject/esbook | 407a99bbe1625c32e5ed889e25b13f69e75b529b | [
"Apache-2.0"
] | null | null | null | src/gunicorn.conf.py | gixproject/esbook | 407a99bbe1625c32e5ed889e25b13f69e75b529b | [
"Apache-2.0"
] | null | null | null | src/gunicorn.conf.py | gixproject/esbook | 407a99bbe1625c32e5ed889e25b13f69e75b529b | [
"Apache-2.0"
] | null | null | null | import multiprocessing
# Gunicorn server configuration.
bind = "0.0.0.0:5000"  # listen on all interfaces, port 5000
workers = multiprocessing.cpu_count() * 2 + 1  # the common 2*CPU + 1 sizing rule
loglevel = "DEBUG"
| 18.5 | 45 | 0.711712 |
9c2446b90a0c44ff8d997500e946c76cd7c14ddb | 66 | py | Python | analysis/__init__.py | mmmaaaggg/QABAT | d6f20d926de047af6857e466cf28084d0ba69993 | [
"MIT"
] | 3 | 2019-08-31T18:01:10.000Z | 2021-04-04T09:51:17.000Z | analysis/__init__.py | mmmaaaggg/QABAT | d6f20d926de047af6857e466cf28084d0ba69993 | [
"MIT"
] | null | null | null | analysis/__init__.py | mmmaaaggg/QABAT | d6f20d926de047af6857e466cf28084d0ba69993 | [
"MIT"
] | 1 | 2020-08-15T17:04:14.000Z | 2020-08-15T17:04:14.000Z | # -*- coding: utf-8 -*-
"""
Created on 2018/1/14
@author: MG
"""
| 9.428571 | 23 | 0.515152 |
1563de75ac5a7f7e9e2820e0358700d4f73a4864 | 1,831 | py | Python | arch/fileio.py | decagondev/CS_41_long | 33cac89c62316943fc539dfe828c386aa1a152ea | [
"MIT"
] | null | null | null | arch/fileio.py | decagondev/CS_41_long | 33cac89c62316943fc539dfe828c386aa1a152ea | [
"MIT"
] | null | null | null | arch/fileio.py | decagondev/CS_41_long | 33cac89c62316943fc539dfe828c386aa1a152ea | [
"MIT"
] | 6 | 2021-03-08T17:09:42.000Z | 2021-03-11T21:58:59.000Z | import sys
# LS-8 opcode values (binary literals match the instruction encoding).
LDI = 0b10000010
PRN = 0b01000111
HLT = 0b00000001
# Machine-code program assembled from the input file, one int per instruction.
program = []
if len(sys.argv) < 2:
    print("Usage: fileio.py <filename>")
else:
    try:
        # store the first argument as a file name
        file_name = sys.argv[1]
        # open the file using the `open()` function
        with open(file_name) as file:
            # iterate over each line in the file
            for line in file:
                # split each line in to an element of a list
                # (everything after '#' is a comment)
                line_data = line.split("#")
                # select the first element (the raw instruction string)
                raw_instruction_string = line_data[0]
                # strip any whitespace / newline characters using `.strip()` method
                sanatized_instruction_string = raw_instruction_string.strip()
                # deal with lines with only comments
                if sanatized_instruction_string == '':
                    continue
                # cast the binary string to an integer, using the `int()` function. passing in a base of 2
                int_of_bin = int(sanatized_instruction_string, 2)
                # # print(f"{int_of_bin:08b}: {int_of_bin}")
                # if int_of_bin == LDI:
                #     print("LDI")
                # elif int_of_bin == LDI:
                #     print("LDI")
                # elif int_of_bin == PRN:
                #     print("PRN")
                # elif int_of_bin == HLT:
                #     print("HLT")
                # append the integer to the program list as an element of machine code
                program.append(int_of_bin)
    except FileNotFoundError:
        print("I can not find the file!!!!!!!!")
print(program) | 36.62 | 110 | 0.500819 |
2ad5dc8760ebfcfafc20bf8375d386d7d48dd363 | 6,714 | py | Python | electrum/gui/kivy/uix/dialogs/crash_reporter.py | qmutz/electrum | 387834164cb28a0853d98f609491bab74d1c0975 | [
"MIT"
] | 1 | 2019-05-02T21:46:30.000Z | 2019-05-02T21:46:30.000Z | electrum/gui/kivy/uix/dialogs/crash_reporter.py | qmutz/electrum | 387834164cb28a0853d98f609491bab74d1c0975 | [
"MIT"
] | 1 | 2021-11-15T17:52:18.000Z | 2021-11-15T17:52:18.000Z | electrum/gui/kivy/uix/dialogs/crash_reporter.py | qmutz/electrum | 387834164cb28a0853d98f609491bab74d1c0975 | [
"MIT"
] | null | null | null | import sys
import json
from aiohttp.client_exceptions import ClientError
from kivy import base, utils
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.factory import Factory
from kivy.lang import Builder
from kivy.uix.label import Label
from kivy.utils import platform
from electrum.gui.kivy.i18n import _
from electrum.base_crash_reporter import BaseCrashReporter
from electrum.logging import Logger
Builder.load_string('''
<CrashReporter@Popup>
BoxLayout:
orientation: 'vertical'
Label:
id: crash_message
text_size: root.width, None
size: self.texture_size
size_hint: None, None
Label:
id: request_help_message
text_size: root.width*.95, None
size: self.texture_size
size_hint: None, None
BoxLayout:
size_hint: 1, 0.1
Button:
text: 'Show report contents'
height: '48dp'
size_hint: 1, None
on_press: root.show_contents()
BoxLayout:
size_hint: 1, 0.1
Label:
id: describe_error_message
text_size: root.width, None
size: self.texture_size
size_hint: None, None
TextInput:
id: user_message
size_hint: 1, 0.3
BoxLayout:
size_hint: 1, 0.7
BoxLayout:
size_hint: 1, None
height: '48dp'
orientation: 'horizontal'
Button:
height: '48dp'
text: 'Send'
on_release: root.send_report()
Button:
text: 'Never'
on_release: root.show_never()
Button:
text: 'Not now'
on_release: root.dismiss()
<CrashReportDetails@Popup>
BoxLayout:
orientation: 'vertical'
ScrollView:
do_scroll_x: False
Label:
id: contents
text_size: root.width*.9, None
size: self.texture_size
size_hint: None, None
Button:
text: 'Close'
height: '48dp'
size_hint: 1, None
on_release: root.dismiss()
''')
class CrashReporter(BaseCrashReporter, Factory.Popup):
    """Kivy popup shown after an uncaught exception; lets the user inspect
    and optionally submit a crash report."""

    issue_template = """[b]Traceback[/b]
[i]{traceback}[/i]
[b]Additional information[/b]
* Electrum version: {app_version}
* Operating system: {os}
* Wallet type: {wallet_type}
* Locale: {locale}
"""

    def __init__(self, main_window, exctype, value, tb):
        BaseCrashReporter.__init__(self, exctype, value, tb)
        Factory.Popup.__init__(self)
        self.main_window = main_window
        self.title = BaseCrashReporter.CRASH_TITLE
        self.title_size = "24sp"
        self.ids.crash_message.text = BaseCrashReporter.CRASH_MESSAGE
        self.ids.request_help_message.text = BaseCrashReporter.REQUEST_HELP_MESSAGE
        self.ids.describe_error_message.text = BaseCrashReporter.DESCRIBE_ERROR_MESSAGE

    def show_contents(self):
        """Open a secondary popup with the full report text."""
        details = CrashReportDetails(self.get_report_string())
        details.open()

    def show_popup(self, title, content):
        popup = Factory.Popup(title=title,
                              content=Label(text=content, text_size=(Window.size[0] * 3/4, None)),
                              size_hint=(3/4, 3/4))
        popup.open()

    def send_report(self):
        """Submit the report over the wallet's network; show the outcome."""
        try:
            loop = self.main_window.network.asyncio_loop
            proxy = self.main_window.network.proxy
            response = json.loads(BaseCrashReporter.send_report(self, loop, proxy, "/crash.json"))
        except (ValueError, ClientError):
            self.show_popup(_('Unable to send report'), _("Please check your network connection."))
        else:
            self.show_popup(_('Report sent'), response["text"])
            if response["location"]:
                self.open_url(response["location"])
            self.dismiss()

    def open_url(self, url):
        # Only Android has the jnius bridge used below.
        if platform != 'android':
            return
        from jnius import autoclass, cast
        String = autoclass("java.lang.String")
        url = String(url)
        PythonActivity = autoclass('org.kivy.android.PythonActivity')
        activity = PythonActivity.mActivity
        Intent = autoclass('android.content.Intent')
        Uri = autoclass('android.net.Uri')
        browserIntent = Intent()
        # This line crashes the app:
        # browserIntent.setAction(Intent.ACTION_VIEW)
        # Luckily we don't need it because the OS is smart enough to recognize the URL
        browserIntent.setData(Uri.parse(url))
        currentActivity = cast('android.app.Activity', activity)
        currentActivity.startActivity(browserIntent)

    def show_never(self):
        """Persistently disable crash reporting and close the dialog."""
        self.main_window.electrum_config.set_key(BaseCrashReporter.config_key, False)
        self.dismiss()

    def get_user_description(self):
        return self.ids.user_message.text

    def get_wallet_type(self):
        return self.main_window.wallet.wallet_type

    def get_os_version(self):
        # Use `!=` for string comparison: the original used `is not`, an
        # identity check against a literal, which is unreliable (and matches
        # the `platform != 'android'` test used in open_url above).
        if utils.platform != "android":
            return utils.platform
        import jnius
        bv = jnius.autoclass('android.os.Build$VERSION')
        b = jnius.autoclass('android.os.Build')
        return "Android {} on {} {} ({})".format(bv.RELEASE, b.BRAND, b.DEVICE, b.DISPLAY)
class CrashReportDetails(Factory.Popup):
    """Scrollable popup displaying the raw crash report text."""

    def __init__(self, text):
        Factory.Popup.__init__(self)
        self.title = "Report Details"
        self.ids.contents.text = text
        print(text)  # NOTE(review): debug print to stdout -- confirm it is wanted
class ExceptionHook(base.ExceptionHandler, Logger):
    """Installs itself as both the Kivy exception handler and
    sys.excepthook, opening a CrashReporter popup for uncaught exceptions."""

    def __init__(self, main_window):
        base.ExceptionHandler.__init__(self)
        Logger.__init__(self)
        self.main_window = main_window
        # Respect the user's "never report" preference: install no hooks.
        if not main_window.electrum_config.get(BaseCrashReporter.config_key, default=True):
            return
        # For exceptions in Kivy:
        base.ExceptionManager.add_handler(self)
        # For everything else:
        sys.excepthook = lambda exctype, value, tb: self.handle_exception(value)

    def handle_exception(self, _inst):
        # Re-fetch the full exc_info triple; _inst alone lacks the traceback.
        exc_info = sys.exc_info()
        self.logger.error('exception caught by crash reporter', exc_info=exc_info)
        # Check if this is an exception from within the exception handler:
        import traceback
        for item in traceback.extract_tb(exc_info[2]):
            if item.filename.endswith("crash_reporter.py"):
                return
        e = CrashReporter(self.main_window, *exc_info)
        # Open in main thread:
        Clock.schedule_once(lambda _: e.open(), 0)
        # Tell Kivy to keep running rather than exit.
        return base.ExceptionManager.PASS
| 33.57 | 99 | 0.616324 |
a30cbe09e8de370961e20751bc850ba8aa045eaf | 5,608 | py | Python | venv/Lib/site-packages/googlemaps/distance_matrix.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 3,797 | 2015-01-08T05:42:35.000Z | 2022-03-30T11:45:13.000Z | venv/Lib/site-packages/googlemaps/distance_matrix.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 344 | 2015-01-09T05:39:11.000Z | 2022-03-22T07:08:56.000Z | venv/Lib/site-packages/googlemaps/distance_matrix.py | star10919/drf | 77c005794087484d72ffc0d76612a6ac9845821e | [
"BSD-3-Clause"
] | 1,368 | 2015-01-08T13:04:52.000Z | 2022-03-29T13:13:31.000Z | #
# Copyright 2014 Google Inc. All rights reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Performs requests to the Google Maps Distance Matrix API."""
from googlemaps import convert
def distance_matrix(client, origins, destinations,
                    mode=None, language=None, avoid=None, units=None,
                    departure_time=None, arrival_time=None, transit_mode=None,
                    transit_routing_preference=None, traffic_model=None, region=None):
    """ Gets travel distance and time for a matrix of origins and destinations.
    :param origins: One or more addresses, Place IDs, and/or latitude/longitude
        values, from which to calculate distance and time. Each Place ID string
        must be prepended with 'place_id:'. If you pass an address as a string,
        the service will geocode the string and convert it to a
        latitude/longitude coordinate to calculate directions.
    :type origins: a single location, or a list of locations, where a
        location is a string, dict, list, or tuple
    :param destinations: One or more addresses, Place IDs, and/or lat/lng values
        , to which to calculate distance and time. Each Place ID string must be
        prepended with 'place_id:'. If you pass an address as a string, the
        service will geocode the string and convert it to a latitude/longitude
        coordinate to calculate directions.
    :type destinations: a single location, or a list of locations, where a
        location is a string, dict, list, or tuple
    :param mode: Specifies the mode of transport to use when calculating
        directions. Valid values are "driving", "walking", "transit" or
        "bicycling".
    :type mode: string
    :param language: The language in which to return results.
    :type language: string
    :param avoid: Indicates that the calculated route(s) should avoid the
        indicated features. Valid values are "tolls", "highways" or "ferries".
    :type avoid: string
    :param units: Specifies the unit system to use when displaying results.
        Valid values are "metric" or "imperial".
    :type units: string
    :param departure_time: Specifies the desired time of departure.
    :type departure_time: int or datetime.datetime
    :param arrival_time: Specifies the desired time of arrival for transit
        directions. Note: you can't specify both departure_time and
        arrival_time.
    :type arrival_time: int or datetime.datetime
    :param transit_mode: Specifies one or more preferred modes of transit.
        This parameter may only be specified for requests where the mode is
        transit. Valid values are "bus", "subway", "train", "tram", "rail".
        "rail" is equivalent to ["train", "tram", "subway"].
    :type transit_mode: string or list of strings
    :param transit_routing_preference: Specifies preferences for transit
        requests. Valid values are "less_walking" or "fewer_transfers".
    :type transit_routing_preference: string
    :param traffic_model: Specifies the predictive travel time model to use.
        Valid values are "best_guess" or "optimistic" or "pessimistic".
        The traffic_model parameter may only be specified for requests where
        the travel mode is driving, and where the request includes a
        departure_time.
    :param region: Specifies the prefered region the geocoder should search
        first, but it will not restrict the results to only this region. Valid
        values are a ccTLD code.
    :type region: string
    :rtype: matrix of distances. Results are returned in rows, each row
        containing one origin paired with each destination.
    """
    params = {
        "origins": convert.location_list(origins),
        "destinations": convert.location_list(destinations)
    }
    if mode:
        # NOTE(broady): the mode parameter is not validated by the Maps API
        # server. Check here to prevent silent failures.
        if mode not in ["driving", "walking", "bicycling", "transit"]:
            raise ValueError("Invalid travel mode.")
        params["mode"] = mode
    if language:
        params["language"] = language
    if avoid:
        if avoid not in ["tolls", "highways", "ferries"]:
            raise ValueError("Invalid route restriction.")
        params["avoid"] = avoid
    if units:
        params["units"] = units
    if departure_time:
        params["departure_time"] = convert.time(departure_time)
    if arrival_time:
        params["arrival_time"] = convert.time(arrival_time)
    if departure_time and arrival_time:
        # Fixed message: the original's adjacent string literals concatenated
        # without a space ("...departure_time andarrival_time.").
        raise ValueError("Should not specify both departure_time and "
                         "arrival_time.")
    if transit_mode:
        params["transit_mode"] = convert.join_list("|", transit_mode)
    if transit_routing_preference:
        params["transit_routing_preference"] = transit_routing_preference
    if traffic_model:
        params["traffic_model"] = traffic_model
    if region:
        params["region"] = region
    return client._request("/maps/api/distancematrix/json", params)
| 40.057143 | 86 | 0.692582 |
65405a6fdc75626bc9d147fbe8553b612414bc7e | 1,842 | py | Python | setup.py | 360youlun/django-nose | 8420dfa01d5ebbc76abe53ced1efcd46d20454e2 | [
"BSD-3-Clause"
] | null | null | null | setup.py | 360youlun/django-nose | 8420dfa01d5ebbc76abe53ced1efcd46d20454e2 | [
"BSD-3-Clause"
] | null | null | null | setup.py | 360youlun/django-nose | 8420dfa01d5ebbc76abe53ced1efcd46d20454e2 | [
"BSD-3-Clause"
] | null | null | null | import os
from setuptools import setup, find_packages
# Absolute path of the repository root; used to locate README.rst below.
ROOT = os.path.abspath(os.path.dirname(__file__))
# Package metadata for django-nose.
setup(
    name='django-nose',
    version='1.7',
    description='Makes your Django tests simple and snappy',
    long_description=open(os.path.join(ROOT, 'README.rst')).read(),
    author='Jeff Balogh',
    author_email='me@jeffbalogh.org',
    maintainer='Erik Rose',
    maintainer_email='erikrose@grinchcentral.com',
    url='http://github.com/django-nose/django-nose',
    license='BSD',
    packages=find_packages(exclude=['testapp', 'testapp/*']),
    include_package_data=True,
    zip_safe=False,
    install_requires=['nose>=1.2.1', 'Django>=1.2'],
    tests_require=['south>=0.7'],
    # This blows up tox runs that install django-nose into a virtualenv,
    # because it causes Nose to import django_nose.runner before the Django
    # settings are initialized, leading to a mess of errors. There's no reason
    # we need FixtureBundlingPlugin declared as an entrypoint anyway, since you
    # need to be using django-nose to find the it useful, and django-nose knows
    # about it intrinsically.
    #entry_points="""
    #    [nose.plugins.0.10]
    #    fixture_bundler = django_nose.fixture_bundling:FixtureBundlingPlugin
    #    """,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Topic :: Software Development :: Testing'
    ]
)
| 38.375 | 79 | 0.658523 |
23a3bc56044e3a2c98d2b3ccb0d103de21a1b677 | 541 | py | Python | students/K33402/Khoroshkeeva_Ksenia/LR1/task_2/server.py | KseniaKhoroshkeeva/ITMO_ICT_WebDevelopment_2021-2022 | 59cda23fcd82f031dc1504d7f5abdae9e1f458c5 | [
"MIT"
] | null | null | null | students/K33402/Khoroshkeeva_Ksenia/LR1/task_2/server.py | KseniaKhoroshkeeva/ITMO_ICT_WebDevelopment_2021-2022 | 59cda23fcd82f031dc1504d7f5abdae9e1f458c5 | [
"MIT"
] | null | null | null | students/K33402/Khoroshkeeva_Ksenia/LR1/task_2/server.py | KseniaKhoroshkeeva/ITMO_ICT_WebDevelopment_2021-2022 | 59cda23fcd82f031dc1504d7f5abdae9e1f458c5 | [
"MIT"
] | null | null | null | import socket
# Create the TCP server socket (comments translated from Russian).
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.bind(("127.0.0.1", 5000))
conn.listen(10)
# Accept an incoming connection
sock, address = conn.accept()
# Receive the message and split it into the three numbers a, b, h
data = sock.recv(16384)
a, b, h = data.decode("utf-8").split()
# Compute the trapezoid area from its formula s = (a + b) / 2 * h
a = float(a)
b = float(b)
h = float(h)
s = 0.5 * (a + b) * h
# Build the reply and send it (the reply text itself is a runtime string
# and is intentionally left in Russian: "trapezoid area equals {s}")
message = f"Площадь трапеции равна {s}"
sock.send(message.encode("utf-8"))
# Close the listening socket
conn.close()
| 20.037037 | 56 | 0.702403 |
f886a9006c0d886881ffe5c1cd38a5a0193e0874 | 1,897 | py | Python | ietf/group/migrations/0014_set_document_m2m_keys.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 25 | 2022-03-05T08:26:52.000Z | 2022-03-30T15:45:42.000Z | ietf/group/migrations/0014_set_document_m2m_keys.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 219 | 2022-03-04T17:29:12.000Z | 2022-03-31T21:16:14.000Z | ietf/group/migrations/0014_set_document_m2m_keys.py | hassanakbar4/ietfdb | cabee059092ae776015410640226064331c293b7 | [
"BSD-3-Clause"
] | 22 | 2022-03-04T15:34:34.000Z | 2022-03-28T13:30:59.000Z | # Copyright The IETF Trust 2019-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-05-10 06:48
import sys
from tqdm import tqdm
from django.db import migrations
def forward(apps, schema_editor):
    """Copy each milestone's docs M2M rows into the new integer-keyed
    through-tables, mapping document names to integer document ids."""
    Document = apps.get_model('doc','Document')
    GroupMilestone = apps.get_model('group', 'GroupMilestone')
    GroupMilestoneDocs = apps.get_model('group', 'GroupMilestoneDocs')
    GroupMilestoneHistory = apps.get_model('group', 'GroupMilestoneHistory')
    GroupMilestoneHistoryDocs = apps.get_model('group', 'GroupMilestoneHistoryDocs')
    # Document id fixup ------------------------------------------------------------
    objs = Document.objects.in_bulk()
    # name -> integer primary key for every document
    # (loop variable renamed: the original shadowed the builtin `id`)
    nameid = { o.name: o.id for pk, o in objs.items() }
    sys.stderr.write('\n')
    sys.stderr.write(' %s.%s:\n' % (GroupMilestone.__name__, 'docs'))
    count = 0
    for m in tqdm(GroupMilestone.objects.all()):
        for d in m.docs.all():
            count += 1
            GroupMilestoneDocs.objects.get_or_create(groupmilestone=m, document_id=nameid[d.name])
    sys.stderr.write(' %s GroupMilestoneDocs objects created\n' % (count, ))
    sys.stderr.write(' %s.%s:\n' % (GroupMilestoneHistory.__name__, 'docs'))
    count = 0
    for m in tqdm(GroupMilestoneHistory.objects.all()):
        for d in m.docs.all():
            count += 1
            # Field keyword must be the lowercase field name; the original
            # passed `GroupMilestoneHistory=m` (model-class casing), which
            # raises TypeError on Django's get_or_create.
            GroupMilestoneHistoryDocs.objects.get_or_create(groupmilestonehistory=m, document_id=nameid[d.name])
    sys.stderr.write(' %s GroupMilestoneHistoryDocs objects created\n' % (count, ))
def reverse(apps, schema_editor):
    # Intentional no-op: forward only copies rows into the new M2M
    # through-tables, so there is nothing that must be undone.
    pass
class Migration(migrations.Migration):
    # Data migration: runs after the new through tables exist
    # (group.0013) and after document ids were assigned (doc.0014).
    dependencies = [
        ('group', '0013_add_groupmilestone_docs2_m2m'),
        ('doc', '0014_set_document_docalias_id'),
    ]
    operations = [
        migrations.RunPython(forward, reverse),
    ]
| 32.706897 | 112 | 0.636268 |
429c53a3a334a95b37c4352ce83c2fa946386bed | 199 | py | Python | molsysmt/item/molsysmt_MolecularMechanics/is_molsysmt_MolecularMechanics.py | uibcdf/MolModMTs | 4f6b6f671a9fa3e73008d1e9c48686d5f20a6573 | [
"MIT"
] | null | null | null | molsysmt/item/molsysmt_MolecularMechanics/is_molsysmt_MolecularMechanics.py | uibcdf/MolModMTs | 4f6b6f671a9fa3e73008d1e9c48686d5f20a6573 | [
"MIT"
] | null | null | null | molsysmt/item/molsysmt_MolecularMechanics/is_molsysmt_MolecularMechanics.py | uibcdf/MolModMTs | 4f6b6f671a9fa3e73008d1e9c48686d5f20a6573 | [
"MIT"
] | null | null | null | def is_molsysmt_MolecularMechanics(item):
item_fullname = item.__class__.__module__+'.'+item.__class__.__name__
output = (item_fullname == 'molsysmt.MolecularMechanics')
return output
| 24.875 | 73 | 0.758794 |
76efbd76f0b9dc9089d4407ff533e1d46ea3eaff | 5,944 | py | Python | src/engine/SCons/Tool/textfile.py | bdbaddog/scons-gh-migrate | c76589c83ec00650a2d07dce79fc6dc5ca6465fb | [
"MIT"
] | null | null | null | src/engine/SCons/Tool/textfile.py | bdbaddog/scons-gh-migrate | c76589c83ec00650a2d07dce79fc6dc5ca6465fb | [
"MIT"
] | null | null | null | src/engine/SCons/Tool/textfile.py | bdbaddog/scons-gh-migrate | c76589c83ec00650a2d07dce79fc6dc5ca6465fb | [
"MIT"
] | null | null | null | # -*- python -*-
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
Textfile/Substfile builder for SCons.
Create file 'target' which typically is a textfile. The 'source'
may be any combination of strings, Nodes, or lists of same. A
'linesep' will be put between any part written and defaults to
os.linesep.
The only difference between the Textfile builder and the Substfile
builder is that strings are converted to Value() nodes for the
former and File() nodes for the latter. To insert files in the
former or strings in the latter, wrap them in a File() or Value(),
respectively.
The values of SUBST_DICT first have any construction variables
expanded (its keys are not expanded). If a value of SUBST_DICT is
a python callable function, it is called and the result is expanded
as the value. Values are substituted in a "random" order; if any
substitution could be further expanded by another substitution, it
is unpredictable whether the expansion will occur.
"""
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons
import os
import re
from SCons.Node import Node
from SCons.Node.Python import Value
from SCons.Util import is_String, is_Sequence, is_Dict
def _do_subst(node, subs):
"""
Fetch the node contents and replace all instances of the keys with
their values. For example, if subs is
{'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'},
then all instances of %VERSION% in the file will be replaced with
1.2345 and so forth.
"""
contents = node.get_text_contents()
if not subs: return contents
for (k,v) in subs:
contents = re.sub(k, v, contents)
return contents
def _action(target, source, env):
    """Build action: write each source's (substituted) contents to target[0].

    Sources are joined with LINESEPARATOR (default os.linesep); each source's
    text is first run through the SUBST_DICT substitutions, if any.
    Raises SCons.Errors.UserError on bad LINESEPARATOR/SUBST_DICT types or
    when the target file cannot be opened for writing.
    """
    # prepare the line separator: may be None, a string, or a Value node
    linesep = env['LINESEPARATOR']
    if linesep is None:
        linesep = os.linesep
    elif is_String(linesep):
        pass
    elif isinstance(linesep, Value):
        linesep = linesep.get_text_contents()
    else:
        raise SCons.Errors.UserError(
            'unexpected type/class for LINESEPARATOR: %s'
            % repr(linesep), None)

    # create the substitution list, expanding construction variables in the
    # values and calling any callable values
    if 'SUBST_DICT' not in env:
        subs = None    # no substitutions
    else:
        d = env['SUBST_DICT']
        if is_Dict(d):
            d = list(d.items())
        elif is_Sequence(d):
            pass
        else:
            raise SCons.Errors.UserError('SUBST_DICT must be dict or sequence')
        subs = []
        for (k, v) in d:
            if callable(v):
                v = v()
            if is_String(v):
                v = env.subst(v)
            else:
                v = str(v)
            subs.append((k, v))

    # write the file
    # Bug fixes: 'except ..., e' is Python-2-only syntax ('as e' works on
    # 2.6+ and 3.x), and the handle must be closed even if a write raises.
    try:
        fd = open(target[0].get_path(), "wb")
    except (OSError, IOError) as e:
        raise SCons.Errors.UserError("Can't write target file %s" % target[0])
    try:
        # separate entries by 'linesep' only between entries, and only if
        # linesep is non-empty
        lsep = None
        for s in source:
            if lsep:
                fd.write(lsep)
            fd.write(_do_subst(s, subs))
            lsep = linesep
    finally:
        fd.close()
def _strfunc(target, source, env):
return "Creating '%s'" % target[0]
def _convert_list_R(newlist, sources):
    """Recursively flatten *sources* into *newlist*, wrapping anything that
    is not already a Node in a Value node."""
    for entry in sources:
        if is_Sequence(entry):
            _convert_list_R(newlist, entry)
            continue
        if isinstance(entry, Node):
            newlist.append(entry)
        else:
            newlist.append(Value(entry))
def _convert_list(target, source, env):
    """Emitter: enforce a single target and normalize the source list so that
    every element is a Node (plain values become Value nodes)."""
    if len(target) != 1:
        raise SCons.Errors.UserError("Only one target file allowed")
    flattened = []
    _convert_list_R(flattened, source)
    return target, flattened
# Construction variables that affect the generated file contents and thus
# must be part of the action signature, so targets rebuild when they change.
_common_varlist = ['SUBST_DICT', 'LINESEPARATOR']

_text_varlist = _common_varlist + ['TEXTFILEPREFIX', 'TEXTFILESUFFIX']
_text_builder = SCons.Builder.Builder(
    action = SCons.Action.Action(_action, _strfunc, varlist = _text_varlist),
    source_factory = Value,
    emitter = _convert_list,
    prefix = '$TEXTFILEPREFIX',
    suffix = '$TEXTFILESUFFIX',
    )

# Bug fix: track SUBSTFILESUFFIX (not TEXTFILESUFFIX, a copy-paste of the
# list above) so Substfile targets respond to their own suffix variable.
_subst_varlist = _common_varlist + ['SUBSTFILEPREFIX', 'SUBSTFILESUFFIX']
_subst_builder = SCons.Builder.Builder(
    action = SCons.Action.Action(_action, _strfunc, varlist = _subst_varlist),
    source_factory = SCons.Node.FS.File,
    emitter = _convert_list,
    prefix = '$SUBSTFILEPREFIX',
    suffix = '$SUBSTFILESUFFIX',
    src_suffix = ['.in'],
    )
def generate(env):
    """Add the Textfile and Substfile builders (and their default prefix,
    suffix, and line-separator settings) to the construction environment."""
    env['LINESEPARATOR'] = os.linesep
    builders = env['BUILDERS']
    builders['Textfile'] = _text_builder
    builders['Substfile'] = _subst_builder
    env['TEXTFILEPREFIX'] = ''
    env['TEXTFILESUFFIX'] = '.txt'
    env['SUBSTFILEPREFIX'] = ''
    env['SUBSTFILESUFFIX'] = ''
def exists(env):
    """Tool availability check: these builders need no external program."""
    return 1
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 33.772727 | 79 | 0.664536 |
4904c812edb61f18a2ffe5ee678e5a34d4634f8d | 1,004 | py | Python | samples/ch04/a4_6_2.py | fengyc/programming-pearls-2nd-python | 6a91f768ed69a4d57d8201f929d2860d06b371ed | [
"Apache-2.0"
] | 12 | 2016-11-02T15:59:36.000Z | 2021-09-11T00:11:40.000Z | samples/ch04/a4_6_2.py | love112358/programming-pearls-2nd-python | 6a91f768ed69a4d57d8201f929d2860d06b371ed | [
"Apache-2.0"
] | null | null | null | samples/ch04/a4_6_2.py | love112358/programming-pearls-2nd-python | 6a91f768ed69a4d57d8201f929d2860d06b371ed | [
"Apache-2.0"
] | 4 | 2018-11-08T01:38:16.000Z | 2021-05-26T02:34:26.000Z | # -*- coding:utf-8 -*-
#
# 二分搜索,并返回第一个匹配的数据
#
# 输入数据 data 从小到大排序
#
# 关键在于对 data[middle] == t 时的处理,需要再次计算 low 与 middle 中间的值 data[q]
# 来确定下一步的调整:data[q] < t 时,low = q ; data[q] == t 时,height = q 。然后
# 继续查找 t ,直到第一次出现的位置。
#
def binary_search_first(data, t):
    """Binary search over ascending *data*; return the index of the FIRST
    occurrence of *t*, or None when *t* is absent.

    Invariant: every element left of `lo` is < t, so as soon as data[lo] == t
    that index is the first occurrence.  On a plain match at `mid`, the probe
    at the midpoint of [lo, mid] decides whether to discard the left half
    (probe value < t) or shrink the right bound onto the match (>= t).
    """
    lo, hi = 0, len(data) - 1
    while lo <= hi:
        if data[lo] == t:
            return lo
        mid = lo + (hi - lo) // 2
        if data[mid] < t:
            lo = mid + 1
        elif data[mid] > t:
            hi = mid - 1
        else:
            probe = lo + (mid - lo) // 2
            if data[probe] < t:
                lo = probe + 1
            else:
                hi = probe
    return None
def test_search():
    """Exercise binary_search_first on 0..999 with 50 duplicated random values."""
    import random
    values = list(range(1000))
    target = random.randint(0, 999)
    # Duplicate the target 50 times; since everything before index `target`
    # is smaller, its first occurrence stays at index `target`.
    values[target:target] = [target] * 50
    assert binary_search_first(values, target) == target
    # 1000 exceeds every element, so the search must report absence.
    assert binary_search_first(values, 1000) is None
| 20.916667 | 65 | 0.501992 |
7104121d2983f9ec313266482def67a5b0748c34 | 125,779 | py | Python | tests/test_x509_ext.py | E-Tahta/python-cryptography | d2d82b41877ba3ae04210aa64f7c81540dd0d31a | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/test_x509_ext.py | E-Tahta/python-cryptography | d2d82b41877ba3ae04210aa64f7c81540dd0d31a | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/test_x509_ext.py | E-Tahta/python-cryptography | d2d82b41877ba3ae04210aa64f7c81540dd0d31a | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2019-01-23T04:08:17.000Z | 2019-01-23T04:08:17.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import binascii
import datetime
import ipaddress
import os
import pytest
import six
from cryptography import x509
from cryptography.hazmat.backends.interfaces import (
DSABackend, EllipticCurveBackend, RSABackend, X509Backend
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509 import DNSName, NameConstraints, SubjectAlternativeName
from cryptography.x509.oid import (
AuthorityInformationAccessOID, ExtendedKeyUsageOID, ExtensionOID,
NameOID, ObjectIdentifier
)
from .hazmat.primitives.fixtures_rsa import RSA_KEY_2048
from .hazmat.primitives.test_ec import _skip_curve_unsupported
from .test_x509 import _load_cert
def _make_certbuilder(private_key):
    """Return a CertificateBuilder preconfigured with an example.org
    self-issued name, serial 777, and a fixed 1999-2020 validity window."""
    name = x509.Name(
        [x509.NameAttribute(NameOID.COMMON_NAME, u'example.org')])
    builder = x509.CertificateBuilder()
    builder = builder.subject_name(name)
    builder = builder.issuer_name(name)
    builder = builder.public_key(private_key.public_key())
    builder = builder.serial_number(777)
    builder = builder.not_valid_before(datetime.datetime(1999, 1, 1))
    builder = builder.not_valid_after(datetime.datetime(2020, 1, 1))
    return builder
class TestExtension(object):
    """Tests for the generic x509.Extension wrapper: constructor type
    validation, repr, and equality semantics."""
    def test_not_an_oid(self):
        bc = x509.BasicConstraints(ca=False, path_length=None)
        with pytest.raises(TypeError):
            x509.Extension("notanoid", True, bc)
    def test_critical_not_a_bool(self):
        bc = x509.BasicConstraints(ca=False, path_length=None)
        with pytest.raises(TypeError):
            x509.Extension(ExtensionOID.BASIC_CONSTRAINTS, "notabool", bc)
    def test_repr(self):
        bc = x509.BasicConstraints(ca=False, path_length=None)
        ext = x509.Extension(ExtensionOID.BASIC_CONSTRAINTS, True, bc)
        # Pins the exact repr string emitted by the library.
        assert repr(ext) == (
            "<Extension(oid=<ObjectIdentifier(oid=2.5.29.19, name=basicConst"
            "raints)>, critical=True, value=<BasicConstraints(ca=False, path"
            "_length=None)>)>"
        )
    def test_eq(self):
        ext1 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.4'), False, 'value'
        )
        ext2 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.4'), False, 'value'
        )
        assert ext1 == ext2
    def test_ne(self):
        # Inequality when any of oid, critical flag, or value differs.
        ext1 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.4'), False, 'value'
        )
        ext2 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.5'), False, 'value'
        )
        ext3 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.4'), True, 'value'
        )
        ext4 = x509.Extension(
            x509.ObjectIdentifier('1.2.3.4'), False, 'value4'
        )
        assert ext1 != ext2
        assert ext1 != ext3
        assert ext1 != ext4
        assert ext1 != object()
class TestUnrecognizedExtension(object):
    """Tests for x509.UnrecognizedExtension: OID validation, equality,
    repr (with py2/py3 bytes-literal differences), and hashing."""
    def test_invalid_oid(self):
        with pytest.raises(TypeError):
            x509.UnrecognizedExtension("notanoid", b"somedata")
    def test_eq(self):
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        assert ext1 == ext2
    def test_ne(self):
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x02"
        )
        ext3 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.5"), b"\x03\x02\x01"
        )
        assert ext1 != ext2
        assert ext1 != ext3
        assert ext1 != object()
    def test_repr(self):
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        # The bytes repr differs between Python 3 (b'...') and Python 2 ('...').
        if six.PY3:
            assert repr(ext1) == (
                "<UnrecognizedExtension(oid=<ObjectIdentifier(oid=1.2.3.4, "
                "name=Unknown OID)>, value=b'\\x03\\x02\\x01')>"
            )
        else:
            assert repr(ext1) == (
                "<UnrecognizedExtension(oid=<ObjectIdentifier(oid=1.2.3.4, "
                "name=Unknown OID)>, value='\\x03\\x02\\x01')>"
            )
    def test_hash(self):
        # Equal objects hash equal; differing OID changes the hash.
        ext1 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext2 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.4"), b"\x03\x02\x01"
        )
        ext3 = x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.2.3.5"), b"\x03\x02\x01"
        )
        assert hash(ext1) == hash(ext2)
        assert hash(ext1) != hash(ext3)
class TestCertificateIssuer(object):
    """Tests for x509.CertificateIssuer: iteration, indexing/slicing,
    equality, repr, and get_values_for_type filtering."""
    def test_iter_names(self):
        ci = x509.CertificateIssuer([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ])
        assert len(ci) == 2
        assert list(ci) == [
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ]
    def test_indexing(self):
        ci = x509.CertificateIssuer([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
            x509.DNSName(u"another.local"),
            x509.RFC822Name(u"email@another.local"),
            x509.UniformResourceIdentifier(u"http://another.local"),
        ])
        # Negative indexing and extended slicing both delegate to the
        # underlying GeneralNames sequence.
        assert ci[-1] == ci[4]
        assert ci[2:6:2] == [ci[2], ci[4]]
    def test_eq(self):
        ci1 = x509.CertificateIssuer([x509.DNSName(u"cryptography.io")])
        ci2 = x509.CertificateIssuer([x509.DNSName(u"cryptography.io")])
        assert ci1 == ci2
    def test_ne(self):
        ci1 = x509.CertificateIssuer([x509.DNSName(u"cryptography.io")])
        ci2 = x509.CertificateIssuer([x509.DNSName(u"somethingelse.tld")])
        assert ci1 != ci2
        assert ci1 != object()
    def test_repr(self):
        ci = x509.CertificateIssuer([x509.DNSName(u"cryptography.io")])
        assert repr(ci) == (
            "<CertificateIssuer(<GeneralNames([<DNSName(value=cryptography.io"
            ")>])>)>"
        )
    def test_get_values_for_type(self):
        ci = x509.CertificateIssuer(
            [x509.DNSName(u"cryptography.io")]
        )
        names = ci.get_values_for_type(x509.DNSName)
        assert names == [u"cryptography.io"]
class TestCRLReason(object):
    """Tests for x509.CRLReason: type validation, equality, hashing, repr."""
    def test_invalid_reason_flags(self):
        with pytest.raises(TypeError):
            x509.CRLReason("notareason")
    def test_eq(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.unspecified)
        assert reason1 == reason2
    def test_ne(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.ca_compromise)
        assert reason1 != reason2
        assert reason1 != object()
    def test_hash(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason2 = x509.CRLReason(x509.ReasonFlags.unspecified)
        reason3 = x509.CRLReason(x509.ReasonFlags.ca_compromise)
        assert hash(reason1) == hash(reason2)
        assert hash(reason1) != hash(reason3)
    def test_repr(self):
        reason1 = x509.CRLReason(x509.ReasonFlags.unspecified)
        assert repr(reason1) == (
            "<CRLReason(reason=ReasonFlags.unspecified)>"
        )
class TestInvalidityDate(object):
    """Tests for x509.InvalidityDate: datetime validation, equality,
    repr, and hashing."""
    def test_invalid_invalidity_date(self):
        with pytest.raises(TypeError):
            x509.InvalidityDate("notadate")
    def test_eq(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        assert invalid1 == invalid2
    def test_ne(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
        assert invalid1 != invalid2
        assert invalid1 != object()
    def test_repr(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        assert repr(invalid1) == (
            "<InvalidityDate(invalidity_date=2015-01-01 01:01:00)>"
        )
    def test_hash(self):
        invalid1 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid2 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 1))
        invalid3 = x509.InvalidityDate(datetime.datetime(2015, 1, 1, 1, 2))
        assert hash(invalid1) == hash(invalid2)
        assert hash(invalid1) != hash(invalid3)
class TestNoticeReference(object):
    """Tests for x509.NoticeReference: notice_numbers validation, iterable
    input, repr (py2/py3 unicode-literal differences), equality."""
    def test_notice_numbers_not_all_int(self):
        with pytest.raises(TypeError):
            x509.NoticeReference("org", [1, 2, "three"])
    def test_notice_numbers_none(self):
        with pytest.raises(TypeError):
            x509.NoticeReference("org", None)
    def test_iter_input(self):
        # A generic iterable of numbers is materialized into a list.
        numbers = [1, 3, 4]
        nr = x509.NoticeReference(u"org", iter(numbers))
        assert list(nr.notice_numbers) == numbers
    def test_repr(self):
        nr = x509.NoticeReference(u"org", [1, 3, 4])
        if six.PY3:
            assert repr(nr) == (
                "<NoticeReference(organization='org', notice_numbers=[1, 3, 4"
                "])>"
            )
        else:
            assert repr(nr) == (
                "<NoticeReference(organization=u'org', notice_numbers=[1, 3, "
                "4])>"
            )
    def test_eq(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        assert nr == nr2
    def test_ne(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1])
        nr3 = x509.NoticeReference(None, [1, 2])
        assert nr != nr2
        assert nr != nr3
        assert nr != object()
class TestUserNotice(object):
    """Tests for x509.UserNotice: notice_reference validation (None allowed),
    repr, and equality."""
    def test_notice_reference_invalid(self):
        with pytest.raises(TypeError):
            x509.UserNotice("invalid", None)
    def test_notice_reference_none(self):
        un = x509.UserNotice(None, "text")
        assert un.notice_reference is None
        assert un.explicit_text == "text"
    def test_repr(self):
        un = x509.UserNotice(x509.NoticeReference(u"org", [1]), u"text")
        if six.PY3:
            assert repr(un) == (
                "<UserNotice(notice_reference=<NoticeReference(organization='"
                "org', notice_numbers=[1])>, explicit_text='text')>"
            )
        else:
            assert repr(un) == (
                "<UserNotice(notice_reference=<NoticeReference(organization=u"
                "'org', notice_numbers=[1])>, explicit_text=u'text')>"
            )
    def test_eq(self):
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1, 2])
        un = x509.UserNotice(nr, "text")
        un2 = x509.UserNotice(nr2, "text")
        assert un == un2
    def test_ne(self):
        # Differs when either the reference or the explicit text differs.
        nr = x509.NoticeReference("org", [1, 2])
        nr2 = x509.NoticeReference("org", [1])
        un = x509.UserNotice(nr, "text")
        un2 = x509.UserNotice(nr2, "text")
        un3 = x509.UserNotice(nr, "text3")
        assert un != un2
        assert un != un3
        assert un != object()
class TestPolicyInformation(object):
    """Tests for x509.PolicyInformation: OID/qualifier validation, iterable
    input, repr, and equality."""
    def test_invalid_policy_identifier(self):
        with pytest.raises(TypeError):
            x509.PolicyInformation("notanoid", None)
    def test_none_policy_qualifiers(self):
        # policy_qualifiers is optional (may be None).
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), None)
        assert pi.policy_identifier == x509.ObjectIdentifier("1.2.3")
        assert pi.policy_qualifiers is None
    def test_policy_qualifiers(self):
        pq = [u"string"]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
        assert pi.policy_identifier == x509.ObjectIdentifier("1.2.3")
        assert pi.policy_qualifiers == pq
    def test_invalid_policy_identifiers(self):
        # Qualifiers must be strings or UserNotice objects, not ints.
        with pytest.raises(TypeError):
            x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), [1, 2])
    def test_iter_input(self):
        qual = [u"foo", u"bar"]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), iter(qual))
        assert list(pi.policy_qualifiers) == qual
    def test_repr(self):
        pq = [u"string", x509.UserNotice(None, u"hi")]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
        if six.PY3:
            assert repr(pi) == (
                "<PolicyInformation(policy_identifier=<ObjectIdentifier(oid=1."
                "2.3, name=Unknown OID)>, policy_qualifiers=['string', <UserNo"
                "tice(notice_reference=None, explicit_text='hi')>])>"
            )
        else:
            assert repr(pi) == (
                "<PolicyInformation(policy_identifier=<ObjectIdentifier(oid=1."
                "2.3, name=Unknown OID)>, policy_qualifiers=[u'string', <UserN"
                "otice(notice_reference=None, explicit_text=u'hi')>])>"
            )
    def test_eq(self):
        pi = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"),
            [u"string", x509.UserNotice(None, u"hi")]
        )
        pi2 = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"),
            [u"string", x509.UserNotice(None, u"hi")]
        )
        assert pi == pi2
    def test_ne(self):
        pi = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string"]
        )
        pi2 = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string2"]
        )
        pi3 = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3.4"), [u"string"]
        )
        assert pi != pi2
        assert pi != pi3
        assert pi != object()
class TestCertificatePolicies(object):
    """Tests for the x509.CertificatePolicies sequence wrapper: element
    validation, iteration, iterable input, repr, equality, and indexing."""
    def test_invalid_policies(self):
        pq = [u"string"]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
        with pytest.raises(TypeError):
            x509.CertificatePolicies([1, pi])
    def test_iter_len(self):
        pq = [u"string"]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
        cp = x509.CertificatePolicies([pi])
        assert len(cp) == 1
        for policyinfo in cp:
            assert policyinfo == pi
    def test_iter_input(self):
        policies = [
            x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), [u"string"])
        ]
        cp = x509.CertificatePolicies(iter(policies))
        assert list(cp) == policies
    def test_repr(self):
        pq = [u"string"]
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), pq)
        cp = x509.CertificatePolicies([pi])
        if six.PY3:
            assert repr(cp) == (
                "<CertificatePolicies([<PolicyInformation(policy_identifier=<O"
                "bjectIdentifier(oid=1.2.3, name=Unknown OID)>, policy_qualifi"
                "ers=['string'])>])>"
            )
        else:
            assert repr(cp) == (
                "<CertificatePolicies([<PolicyInformation(policy_identifier=<O"
                "bjectIdentifier(oid=1.2.3, name=Unknown OID)>, policy_qualifi"
                "ers=[u'string'])>])>"
            )
    def test_eq(self):
        pi = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string"]
        )
        cp = x509.CertificatePolicies([pi])
        pi2 = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string"]
        )
        cp2 = x509.CertificatePolicies([pi2])
        assert cp == cp2
    def test_ne(self):
        pi = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string"]
        )
        cp = x509.CertificatePolicies([pi])
        pi2 = x509.PolicyInformation(
            x509.ObjectIdentifier("1.2.3"), [u"string2"]
        )
        cp2 = x509.CertificatePolicies([pi2])
        assert cp != cp2
        assert cp != object()
    def test_indexing(self):
        pi = x509.PolicyInformation(x509.ObjectIdentifier("1.2.3"), [u"test"])
        pi2 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.4"), [u"test"])
        pi3 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.5"), [u"test"])
        pi4 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.6"), [u"test"])
        pi5 = x509.PolicyInformation(x509.ObjectIdentifier("1.2.7"), [u"test"])
        cp = x509.CertificatePolicies([pi, pi2, pi3, pi4, pi5])
        # Negative indexing and extended slicing behave like a list.
        assert cp[-1] == cp[4]
        assert cp[2:6:2] == [cp[2], cp[4]]
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestCertificatePoliciesExtension(object):
    """Parses certificatePolicies from fixture certificates and checks the
    decoded CPS URIs, user notices, and notice references."""
    def test_cps_uri_policy_qualifier(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "cp_cps_uri.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies([
            x509.PolicyInformation(
                x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                [u"http://other.com/cps"]
            )
        ])
    def test_user_notice_with_notice_reference(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_with_notice_reference.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies([
            x509.PolicyInformation(
                x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                [
                    u"http://example.com/cps",
                    u"http://other.com/cps",
                    x509.UserNotice(
                        x509.NoticeReference(u"my org", [1, 2, 3, 4]),
                        u"thing"
                    )
                ]
            )
        ])
    def test_user_notice_with_explicit_text(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_with_explicit_text.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies([
            x509.PolicyInformation(
                x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                [x509.UserNotice(None, u"thing")]
            )
        ])
    def test_user_notice_no_explicit_text(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "cp_user_notice_no_explicit_text.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        cp = cert.extensions.get_extension_for_oid(
            ExtensionOID.CERTIFICATE_POLICIES
        ).value
        assert cp == x509.CertificatePolicies([
            x509.PolicyInformation(
                x509.ObjectIdentifier("2.16.840.1.12345.1.2.3.4.1"),
                [
                    x509.UserNotice(
                        x509.NoticeReference(u"my org", [1, 2, 3, 4]),
                        None
                    )
                ]
            )
        ])
class TestKeyUsage(object):
    """Tests for x509.KeyUsage: encipher_only/decipher_only are only valid
    (constructible/readable) when key_agreement is True; also covers repr
    and equality."""
    def test_key_agreement_false_encipher_decipher_true(self):
        # Any combination of encipher_only/decipher_only set while
        # key_agreement is False must be rejected.
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=True,
                decipher_only=False
            )
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=True,
                decipher_only=True
            )
        with pytest.raises(ValueError):
            x509.KeyUsage(
                digital_signature=False,
                content_commitment=False,
                key_encipherment=False,
                data_encipherment=False,
                key_agreement=False,
                key_cert_sign=False,
                crl_sign=False,
                encipher_only=False,
                decipher_only=True
            )
    # NOTE(review): the name says key_agreement_true but the instance is
    # constructed with key_agreement=False and the basic properties are
    # asserted -- consider renaming to match what it exercises.
    def test_properties_key_agreement_true(self):
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False
        )
        assert ku.digital_signature is True
        assert ku.content_commitment is True
        assert ku.key_encipherment is False
        assert ku.data_encipherment is False
        assert ku.key_agreement is False
        assert ku.key_cert_sign is True
        assert ku.crl_sign is False
    def test_key_agreement_true_properties(self):
        # With key_agreement=True the encipher/decipher flags are readable.
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True
        )
        assert ku.key_agreement is True
        assert ku.encipher_only is False
        assert ku.decipher_only is True
    def test_key_agreement_false_properties(self):
        # With key_agreement=False, reading encipher_only/decipher_only raises.
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False
        )
        assert ku.key_agreement is False
        with pytest.raises(ValueError):
            ku.encipher_only
        with pytest.raises(ValueError):
            ku.decipher_only
    def test_repr_key_agreement_false(self):
        # encipher_only/decipher_only render as None when key_agreement=False.
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False
        )
        assert repr(ku) == (
            "<KeyUsage(digital_signature=True, content_commitment=True, key_en"
            "cipherment=False, data_encipherment=False, key_agreement=False, k"
            "ey_cert_sign=True, crl_sign=False, encipher_only=None, decipher_o"
            "nly=None)>"
        )
    def test_repr_key_agreement_true(self):
        ku = x509.KeyUsage(
            digital_signature=True,
            content_commitment=True,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=True,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False
        )
        assert repr(ku) == (
            "<KeyUsage(digital_signature=True, content_commitment=True, key_en"
            "cipherment=False, data_encipherment=False, key_agreement=True, k"
            "ey_cert_sign=True, crl_sign=False, encipher_only=False, decipher_"
            "only=False)>"
        )
    def test_eq(self):
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True
        )
        ku2 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True
        )
        assert ku == ku2
    def test_ne(self):
        ku = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=True,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=True
        )
        ku2 = x509.KeyUsage(
            digital_signature=False,
            content_commitment=False,
            key_encipherment=False,
            data_encipherment=False,
            key_agreement=False,
            key_cert_sign=False,
            crl_sign=False,
            encipher_only=False,
            decipher_only=False
        )
        assert ku != ku2
        assert ku != object()
class TestSubjectKeyIdentifier(object):
    """Tests for x509.SubjectKeyIdentifier: digest property, repr
    (py2/py3 bytes differences), equality, and hashing."""
    def test_properties(self):
        value = binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        ski = x509.SubjectKeyIdentifier(value)
        assert ski.digest == value
    def test_repr(self):
        ski = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ext = x509.Extension(ExtensionOID.SUBJECT_KEY_IDENTIFIER, False, ski)
        # Bytes repr differs between Python 3 (b'...') and Python 2 ('...').
        if six.PY3:
            assert repr(ext) == (
                "<Extension(oid=<ObjectIdentifier(oid=2.5.29.14, name=subjectK"
                "eyIdentifier)>, critical=False, value=<SubjectKeyIdentifier(d"
                "igest=b\'\\t#\\x84\\x93\"0I\\x8b\\xc9\\x80\\xaa\\x80\\x98Eoo"
                "\\xf7\\xff:\\xc9\')>)>"
            )
        else:
            assert repr(ext) == (
                "<Extension(oid=<ObjectIdentifier(oid=2.5.29.14, name=subjectK"
                "eyIdentifier)>, critical=False, value=<SubjectKeyIdentifier(d"
                "igest=\'\\t#\\x84\\x93\"0I\\x8b\\xc9\\x80\\xaa\\x80\\x98Eoo"
                "\\xf7\\xff:\\xc9\')>)>"
            )
    def test_eq(self):
        ski = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ski2 = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        assert ski == ski2
    def test_ne(self):
        ski = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ski2 = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"aa8098456f6ff7ff3ac9092384932230498bc980")
        )
        assert ski != ski2
        assert ski != object()
    def test_hash(self):
        ski1 = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ski2 = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"092384932230498bc980aa8098456f6ff7ff3ac9")
        )
        ski3 = x509.SubjectKeyIdentifier(
            binascii.unhexlify(b"aa8098456f6ff7ff3ac9092384932230498bc980")
        )
        assert hash(ski1) == hash(ski2)
        assert hash(ski1) != hash(ski3)
class TestAuthorityKeyIdentifier(object):
    """Tests for x509.AuthorityKeyIdentifier: authority_cert_issuer and
    authority_cert_serial_number must be supplied together (or both None);
    also covers iterable input, repr, and equality."""
    def test_authority_cert_issuer_not_generalname(self):
        with pytest.raises(TypeError):
            x509.AuthorityKeyIdentifier(b"identifier", ["notname"], 3)
    def test_authority_cert_serial_number_not_integer(self):
        dirname = x509.DirectoryName(
            x509.Name([
                x509.NameAttribute(
                    x509.ObjectIdentifier('2.999.1'),
                    u'value1'
                ),
                x509.NameAttribute(
                    x509.ObjectIdentifier('2.999.2'),
                    u'value2'
                ),
            ])
        )
        with pytest.raises(TypeError):
            x509.AuthorityKeyIdentifier(b"identifier", [dirname], "notanint")
    def test_authority_issuer_none_serial_not_none(self):
        with pytest.raises(ValueError):
            x509.AuthorityKeyIdentifier(b"identifier", None, 3)
    def test_authority_issuer_not_none_serial_none(self):
        dirname = x509.DirectoryName(
            x509.Name([
                x509.NameAttribute(
                    x509.ObjectIdentifier('2.999.1'),
                    u'value1'
                ),
                x509.NameAttribute(
                    x509.ObjectIdentifier('2.999.2'),
                    u'value2'
                ),
            ])
        )
        with pytest.raises(ValueError):
            x509.AuthorityKeyIdentifier(b"identifier", [dirname], None)
    def test_authority_cert_serial_and_issuer_none(self):
        aki = x509.AuthorityKeyIdentifier(b"id", None, None)
        assert aki.key_identifier == b"id"
        assert aki.authority_cert_issuer is None
        assert aki.authority_cert_serial_number is None
    def test_authority_cert_serial_zero(self):
        # Serial number 0 is falsy but still a valid serial.
        dns = x509.DNSName(u"SomeIssuer")
        aki = x509.AuthorityKeyIdentifier(b"id", [dns], 0)
        assert aki.key_identifier == b"id"
        assert aki.authority_cert_issuer == [dns]
        assert aki.authority_cert_serial_number == 0
    def test_iter_input(self):
        dirnames = [
            x509.DirectoryName(
                x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'myCN')])
            )
        ]
        aki = x509.AuthorityKeyIdentifier(b"digest", iter(dirnames), 1234)
        assert list(aki.authority_cert_issuer) == dirnames
    def test_repr(self):
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'myCN')])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        if six.PY3:
            assert repr(aki) == (
                "<AuthorityKeyIdentifier(key_identifier=b'digest', authority_"
                "cert_issuer=[<DirectoryName(value=<Name([<NameAttribute(oid="
                "<ObjectIdentifier(oid=2.5.4.3, name=commonName)>, value='myC"
                "N')>])>)>], authority_cert_serial_number=1234)>"
            )
        else:
            assert repr(aki) == (
                "<AuthorityKeyIdentifier(key_identifier='digest', authority_ce"
                "rt_issuer=[<DirectoryName(value=<Name([<NameAttribute(oid=<Ob"
                "jectIdentifier(oid=2.5.4.3, name=commonName)>, value=u'myCN')"
                ">])>)>], authority_cert_serial_number=1234)>"
            )
    def test_eq(self):
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'myCN')])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        dirname2 = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'myCN')])
        )
        aki2 = x509.AuthorityKeyIdentifier(b"digest", [dirname2], 1234)
        assert aki == aki2
    def test_ne(self):
        # Any of key id, issuer list, or serial differing breaks equality.
        dirname = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'myCN')])
        )
        dirname5 = x509.DirectoryName(
            x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'aCN')])
        )
        aki = x509.AuthorityKeyIdentifier(b"digest", [dirname], 1234)
        aki2 = x509.AuthorityKeyIdentifier(b"diges", [dirname], 1234)
        aki3 = x509.AuthorityKeyIdentifier(b"digest", None, None)
        aki4 = x509.AuthorityKeyIdentifier(b"digest", [dirname], 12345)
        aki5 = x509.AuthorityKeyIdentifier(b"digest", [dirname5], 12345)
        assert aki != aki2
        assert aki != aki3
        assert aki != aki4
        assert aki != aki5
        assert aki != object()
class TestBasicConstraints(object):
    """Unit tests for the x509.BasicConstraints extension type."""
    def test_ca_not_boolean(self):
        # ca must be a bool; a truthy string is rejected.
        with pytest.raises(TypeError):
            x509.BasicConstraints(ca="notbool", path_length=None)
    def test_path_length_not_ca(self):
        # A path_length is only meaningful on CA certificates.
        with pytest.raises(ValueError):
            x509.BasicConstraints(ca=False, path_length=0)
    def test_path_length_not_int(self):
        # Neither floats nor strings are acceptable path lengths.
        for bad_length in (1.1, "notint"):
            with pytest.raises(TypeError):
                x509.BasicConstraints(ca=True, path_length=bad_length)
    def test_path_length_negative(self):
        with pytest.raises(TypeError):
            x509.BasicConstraints(ca=True, path_length=-1)
    def test_repr(self):
        constraints = x509.BasicConstraints(ca=True, path_length=None)
        expected = "<BasicConstraints(ca=True, path_length=None)>"
        assert repr(constraints) == expected
    def test_hash(self):
        bc1 = x509.BasicConstraints(ca=True, path_length=None)
        bc2 = x509.BasicConstraints(ca=True, path_length=None)
        bc3 = x509.BasicConstraints(ca=True, path_length=0)
        assert hash(bc1) == hash(bc2)
        assert hash(bc1) != hash(bc3)
    def test_eq(self):
        bc1 = x509.BasicConstraints(ca=True, path_length=None)
        bc2 = x509.BasicConstraints(ca=True, path_length=None)
        assert bc1 == bc2
    def test_ne(self):
        bc1 = x509.BasicConstraints(ca=True, path_length=None)
        bc2 = x509.BasicConstraints(ca=True, path_length=1)
        bc3 = x509.BasicConstraints(ca=False, path_length=None)
        assert bc1 != bc2
        assert bc1 != bc3
        assert bc1 != object()
class TestExtendedKeyUsage(object):
    """Unit tests for the x509.ExtendedKeyUsage extension type."""
    def test_not_all_oids(self):
        # Every usage must be an ObjectIdentifier.
        with pytest.raises(TypeError):
            x509.ExtendedKeyUsage(["notoid"])
    def test_iter_len(self):
        eku = x509.ExtendedKeyUsage([
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
        ])
        assert len(eku) == 2
        # Raw dotted-string OIDs compare equal to the named constants.
        assert list(eku) == [
            ExtendedKeyUsageOID.SERVER_AUTH,
            ExtendedKeyUsageOID.CLIENT_AUTH
        ]
    def test_iter_input(self):
        """The constructor must materialize a generator argument."""
        usages = [
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
        ]
        aia = x509.ExtendedKeyUsage(iter(usages))
        assert list(aia) == usages
    def test_repr(self):
        eku = x509.ExtendedKeyUsage([
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.1"),
            x509.ObjectIdentifier("1.3.6.1.5.5.7.3.2"),
        ])
        # repr includes the friendly OID names (serverAuth/clientAuth).
        assert repr(eku) == (
            "<ExtendedKeyUsage([<ObjectIdentifier(oid=1.3.6.1.5.5.7.3.1, name="
            "serverAuth)>, <ObjectIdentifier(oid=1.3.6.1.5.5.7.3.2, name=clien"
            "tAuth)>])>"
        )
    def test_eq(self):
        eku = x509.ExtendedKeyUsage([
            x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")
        ])
        eku2 = x509.ExtendedKeyUsage([
            x509.ObjectIdentifier("1.3.6"), x509.ObjectIdentifier("1.3.7")
        ])
        assert eku == eku2
    def test_ne(self):
        eku = x509.ExtendedKeyUsage([x509.ObjectIdentifier("1.3.6")])
        eku2 = x509.ExtendedKeyUsage([x509.ObjectIdentifier("1.3.6.1")])
        assert eku != eku2
        assert eku != object()
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestExtensions(object):
    """Tests for the Extensions collection parsed from loaded certificates.

    Each test loads a fixture certificate and inspects ``cert.extensions``
    behavior: lookup, iteration, indexing, and error reporting.
    """
    def test_no_extensions(self, backend):
        # A certificate with no extensions yields an empty collection and
        # lookup raises ExtensionNotFound carrying the requested OID.
        cert = _load_cert(
            os.path.join("x509", "verisign_md2_root.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions
        assert len(ext) == 0
        assert list(ext) == []
        with pytest.raises(x509.ExtensionNotFound) as exc:
            ext.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
        assert exc.value.oid == ExtensionOID.BASIC_CONSTRAINTS
    def test_one_extension(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        extensions = cert.extensions
        ext = extensions.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
        assert ext is not None
        assert ext.value.ca is False
    def test_duplicate_extension(self, backend):
        # Two BasicConstraints extensions in one cert is invalid per
        # RFC 5280; parsing must raise DuplicateExtension with the OID.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "two_basic_constraints.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(x509.DuplicateExtension) as exc:
            cert.extensions
        assert exc.value.oid == ExtensionOID.BASIC_CONSTRAINTS
    def test_unsupported_critical_extension(self, backend):
        # A *critical* extension this version cannot parse must raise.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "unsupported_extension_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(x509.UnsupportedExtension) as exc:
            cert.extensions
        assert exc.value.oid == x509.ObjectIdentifier("1.2.3.4")
    @pytest.mark.requires_backend_interface(interface=EllipticCurveBackend)
    def test_unsupported_extension(self, backend):
        # Non-critical unknown extensions are wrapped in
        # UnrecognizedExtension rather than raising.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "unsupported_extension_2.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        extensions = cert.extensions
        assert len(extensions) == 2
        assert extensions[0].critical is False
        assert extensions[0].oid == x509.ObjectIdentifier(
            "1.3.6.1.4.1.41482.2"
        )
        assert extensions[0].value == x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.3.6.1.4.1.41482.2"),
            b"1.3.6.1.4.1.41482.1.2"
        )
        assert extensions[1].critical is False
        assert extensions[1].oid == x509.ObjectIdentifier(
            "1.3.6.1.4.1.45724.2.1.1"
        )
        assert extensions[1].value == x509.UnrecognizedExtension(
            x509.ObjectIdentifier("1.3.6.1.4.1.45724.2.1.1"),
            b"\x03\x02\x040"
        )
    def test_no_extensions_get_for_class(self, backend):
        # Class-based lookup mirrors OID lookup error behavior.
        cert = _load_cert(
            os.path.join(
                "x509", "cryptography.io.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        exts = cert.extensions
        with pytest.raises(x509.ExtensionNotFound) as exc:
            exts.get_extension_for_class(x509.IssuerAlternativeName)
        assert exc.value.oid == ExtensionOID.ISSUER_ALTERNATIVE_NAME
    def test_unrecognized_extension_for_class(self):
        # UnrecognizedExtension has no single OID, so class lookup for it
        # is a TypeError.
        exts = x509.Extensions([])
        with pytest.raises(TypeError):
            exts.get_extension_for_class(x509.UnrecognizedExtension)
    def test_indexing(self, backend):
        # Extensions supports negative indices and extended slices.
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        exts = cert.extensions
        assert exts[-1] == exts[7]
        assert exts[2:6:2] == [exts[2], exts[4]]
    def test_one_extension_get_for_class(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_class(x509.BasicConstraints)
        assert ext is not None
        assert isinstance(ext.value, x509.BasicConstraints)
    def test_repr(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        assert repr(cert.extensions) == (
            "<Extensions([<Extension(oid=<ObjectIdentifier(oid=2.5.29.19, name"
            "=basicConstraints)>, critical=False, value=<BasicConstraints(ca=F"
            "alse, path_length=None)>)>])>"
        )
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestBasicConstraintsExtension(object):
    """Tests parsing the BasicConstraints extension from fixture certs,
    covering CA/non-CA and the presence/absence/zero of path_length.
    """
    def test_ca_true_pathlen_6(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "pathLenConstraint6CACert.crt"
            ),
            x509.load_der_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length == 6
    def test_path_length_zero(self, backend):
        # path_length == 0 must round-trip as 0, not None/falsy.
        cert = _load_cert(
            os.path.join("x509", "custom", "bc_path_length_zero.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length == 0
    def test_ca_true_no_pathlen(self, backend):
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is True
        assert ext.value.path_length is None
    def test_ca_false(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is True
        assert ext.value.ca is False
        assert ext.value.path_length is None
    def test_no_basic_constraints(self, backend):
        # End-entity cert without the extension: lookup raises.
        cert = _load_cert(
            os.path.join(
                "x509",
                "PKITS_data",
                "certs",
                "ValidCertificatePathTest1EE.crt"
            ),
            x509.load_der_x509_certificate,
            backend
        )
        with pytest.raises(x509.ExtensionNotFound):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.BASIC_CONSTRAINTS
            )
    def test_basic_constraint_not_critical(self, backend):
        # The critical flag is read from the certificate, not assumed.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "basic_constraints_not_critical.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.ca is False
class TestSubjectKeyIdentifierExtension(object):
    """Tests parsing SubjectKeyIdentifier and deriving it from public keys
    of different algorithms (RSA, DSA, EC).
    """
    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_subject_key_identifier(self, backend):
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = ext.value
        assert ext is not None
        assert ext.critical is False
        # Known digest taken from the fixture certificate.
        assert ski.digest == binascii.unhexlify(
            b"580184241bbc2b52944a3da510721451f5af3ac9"
        )
    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_no_subject_key_identifier(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "bc_path_length_zero.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(x509.ExtensionNotFound):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.SUBJECT_KEY_IDENTIFIER
            )
    @pytest.mark.requires_backend_interface(interface=RSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_rsa_public_key(self, backend):
        # SKI derived from the cert's own public key must match the
        # extension embedded in the cert.
        cert = _load_cert(
            os.path.join("x509", "PKITS_data", "certs", "GoodCACert.crt"),
            x509.load_der_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(
            cert.public_key()
        )
        assert ext.value == ski
    @pytest.mark.requires_backend_interface(interface=DSABackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_dsa_public_key(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "dsa_selfsigned_ca.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(
            cert.public_key()
        )
        assert ext.value == ski
    @pytest.mark.requires_backend_interface(interface=EllipticCurveBackend)
    @pytest.mark.requires_backend_interface(interface=X509Backend)
    def test_from_ec_public_key(self, backend):
        # Skips when the backend lacks the fixture's curve.
        _skip_curve_unsupported(backend, ec.SECP384R1())
        cert = _load_cert(
            os.path.join("x509", "ecdsa_root.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_KEY_IDENTIFIER
        )
        ski = x509.SubjectKeyIdentifier.from_public_key(
            cert.public_key()
        )
        assert ext.value == ski
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestKeyUsageExtension(object):
    """Tests parsing the KeyUsage extension bit flags from fixture certs."""
    def test_no_key_usage(self, backend):
        cert = _load_cert(
            os.path.join("x509", "verisign_md2_root.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions
        with pytest.raises(x509.ExtensionNotFound) as exc:
            ext.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert exc.value.oid == ExtensionOID.KEY_USAGE
    def test_all_purposes(self, backend):
        # Fixture cert sets every KeyUsage bit, including encipher_only /
        # decipher_only which are only valid with key_agreement.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "all_key_usages.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        extensions = cert.extensions
        ext = extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert ext is not None
        ku = ext.value
        assert ku.digital_signature is True
        assert ku.content_commitment is True
        assert ku.key_encipherment is True
        assert ku.data_encipherment is True
        assert ku.key_agreement is True
        assert ku.key_cert_sign is True
        assert ku.crl_sign is True
        assert ku.encipher_only is True
        assert ku.decipher_only is True
    def test_key_cert_sign_crl_sign(self, backend):
        # Typical CA profile: only keyCertSign and cRLSign are set.
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "pathLenConstraint6CACert.crt"
            ),
            x509.load_der_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE)
        assert ext is not None
        assert ext.critical is True
        ku = ext.value
        assert ku.digital_signature is False
        assert ku.content_commitment is False
        assert ku.key_encipherment is False
        assert ku.data_encipherment is False
        assert ku.key_agreement is False
        assert ku.key_cert_sign is True
        assert ku.crl_sign is True
@pytest.mark.parametrize(
    "name", [
        x509.RFC822Name,
        x509.DNSName,
        x509.UniformResourceIdentifier
    ]
)
class TestTextGeneralNames(object):
    """Shared tests for GeneralName types wrapping a text value; the class
    under test is injected via the ``name`` parametrize fixture.
    """
    def test_not_text(self, name):
        # Both bytes and non-string types must be rejected.
        for bad_value in (b"notaunicodestring", 1.3):
            with pytest.raises(TypeError):
                name(bad_value)
    def test_repr(self, name):
        general_name = name(u"string")
        expected = "<{0}(value=string)>".format(name.__name__)
        assert repr(general_name) == expected
    def test_eq(self, name):
        assert name(u"string") == name(u"string")
    def test_ne(self, name):
        general_name = name(u"string")
        assert general_name != name(u"string2")
        assert general_name != object()
class TestDirectoryName(object):
    """Unit tests for the DirectoryName general name type."""
    def test_not_name(self):
        # Only x509.Name instances are accepted.
        with pytest.raises(TypeError):
            x509.DirectoryName(b"notaname")
        with pytest.raises(TypeError):
            x509.DirectoryName(1.3)
    def test_repr(self):
        name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'value1')])
        gn = x509.DirectoryName(name)
        # The inner text repr differs between Python 2 (u'value1') and
        # Python 3 ('value1').
        if six.PY3:
            assert repr(gn) == (
                "<DirectoryName(value=<Name([<NameAttribute(oid=<ObjectIdentif"
                "ier(oid=2.5.4.3, name=commonName)>, value='value1')>])>)>"
            )
        else:
            assert repr(gn) == (
                "<DirectoryName(value=<Name([<NameAttribute(oid=<ObjectIdentif"
                "ier(oid=2.5.4.3, name=commonName)>, value=u'value1')>])>)>"
            )
    def test_eq(self):
        name = x509.Name([
            x509.NameAttribute(x509.ObjectIdentifier('2.999.1'), u'value1')
        ])
        name2 = x509.Name([
            x509.NameAttribute(x509.ObjectIdentifier('2.999.1'), u'value1')
        ])
        gn = x509.DirectoryName(name)
        gn2 = x509.DirectoryName(name2)
        assert gn == gn2
    def test_ne(self):
        name = x509.Name([
            x509.NameAttribute(x509.ObjectIdentifier('2.999.1'), u'value1')
        ])
        name2 = x509.Name([
            x509.NameAttribute(x509.ObjectIdentifier('2.999.2'), u'value2')
        ])
        gn = x509.DirectoryName(name)
        gn2 = x509.DirectoryName(name2)
        assert gn != gn2
        assert gn != object()
class TestRFC822Name(object):
    """Tests for the RFC822Name (email address) general name type."""
    def test_invalid_email(self):
        # Display-name forms and the empty string are not bare addresses.
        for invalid in (u"Name <email>", u""):
            with pytest.raises(ValueError):
                x509.RFC822Name(invalid)
    def test_single_label(self):
        name = x509.RFC822Name(u"administrator")
        assert name.value == u"administrator"
    def test_idna(self):
        # The Unicode value is kept as-is; only the encoded wire form is
        # IDNA-transformed.
        name = x509.RFC822Name(u"email@em\xe5\xefl.com")
        assert name.value == u"email@em\xe5\xefl.com"
        assert name._encoded == b"email@xn--eml-vla4c.com"
    def test_hash(self):
        name1 = x509.RFC822Name(u"email@host.com")
        name2 = x509.RFC822Name(u"email@host.com")
        name3 = x509.RFC822Name(u"admin@host.com")
        assert hash(name1) == hash(name2)
        assert hash(name1) != hash(name3)
class TestUniformResourceIdentifier(object):
    """Tests for the URI general name type, including IDNA host encoding."""
    def test_no_parsed_hostname(self):
        uri = x509.UniformResourceIdentifier(u"singlelabel")
        assert uri.value == u"singlelabel"
    def test_with_port(self):
        uri = x509.UniformResourceIdentifier(u"singlelabel:443/test")
        assert uri.value == u"singlelabel:443/test"
    def test_idna_no_port(self):
        # Non-ASCII hosts are IDNA-encoded in the wire form only.
        value = u"http://\u043f\u044b\u043a\u0430.cryptography"
        uri = x509.UniformResourceIdentifier(value)
        assert uri.value == value
        assert uri._encoded == b"http://xn--80ato2c.cryptography"
    def test_idna_with_port(self):
        # Port and path survive encoding untouched.
        value = u"gopher://\u043f\u044b\u043a\u0430.cryptography:70/some/path"
        uri = x509.UniformResourceIdentifier(value)
        assert uri.value == value
        assert uri._encoded == b"gopher://xn--80ato2c.cryptography:70/some/path"
    def test_query_and_fragment(self):
        value = u"ldap://cryptography:90/path?query=true#somedata"
        uri = x509.UniformResourceIdentifier(value)
        assert uri.value == value
    def test_hash(self):
        uri1 = x509.UniformResourceIdentifier(u"http://host.com")
        uri2 = x509.UniformResourceIdentifier(u"http://host.com")
        uri3 = x509.UniformResourceIdentifier(u"http://other.com")
        assert hash(uri1) == hash(uri2)
        assert hash(uri1) != hash(uri3)
class TestRegisteredID(object):
    """Tests for the RegisteredID general name type."""
    def test_not_oid(self):
        # Only ObjectIdentifier instances are accepted.
        for bad_value in (b"notanoid", 1.3):
            with pytest.raises(TypeError):
                x509.RegisteredID(bad_value)
    def test_repr(self):
        rid = x509.RegisteredID(NameOID.COMMON_NAME)
        assert repr(rid) == (
            "<RegisteredID(value=<ObjectIdentifier(oid=2.5.4.3, name=commonNam"
            "e)>)>"
        )
    def test_eq(self):
        rid = x509.RegisteredID(NameOID.COMMON_NAME)
        assert rid == x509.RegisteredID(NameOID.COMMON_NAME)
    def test_ne(self):
        rid = x509.RegisteredID(NameOID.COMMON_NAME)
        assert rid != x509.RegisteredID(ExtensionOID.BASIC_CONSTRAINTS)
        assert rid != object()
class TestIPAddress(object):
    """Tests for the IPAddress general name type."""
    def test_not_ipaddress(self):
        # Only ipaddress-module address/network objects are accepted.
        for bad_value in (b"notanipaddress", 1.3):
            with pytest.raises(TypeError):
                x509.IPAddress(bad_value)
    def test_repr(self):
        # Both single addresses and networks, v4 and v6.
        cases = [
            (ipaddress.IPv4Address(u"127.0.0.1"),
             "<IPAddress(value=127.0.0.1)>"),
            (ipaddress.IPv6Address(u"ff::"), "<IPAddress(value=ff::)>"),
            (ipaddress.IPv4Network(u"192.168.0.0/24"),
             "<IPAddress(value=192.168.0.0/24)>"),
            (ipaddress.IPv6Network(u"ff::/96"), "<IPAddress(value=ff::/96)>"),
        ]
        for value, expected in cases:
            assert repr(x509.IPAddress(value)) == expected
    def test_eq(self):
        addr = ipaddress.IPv4Address(u"127.0.0.1")
        assert x509.IPAddress(addr) == x509.IPAddress(addr)
    def test_ne(self):
        gn1 = x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.1"))
        gn2 = x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.2"))
        assert gn1 != gn2
        assert gn1 != object()
class TestOtherName(object):
    """Unit tests for the OtherName general name type (OID + DER value)."""
    def test_invalid_args(self):
        # type_id must be an ObjectIdentifier and value must be bytes.
        with pytest.raises(TypeError):
            x509.OtherName(b"notanobjectidentifier", b"derdata")
        with pytest.raises(TypeError):
            x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), u"notderdata")
    def test_repr(self):
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        # bytes repr differs between Python 2 and 3 (b'derdata'/'derdata').
        if six.PY3:
            assert repr(gn) == (
                "<OtherName(type_id=<ObjectIdentifier(oid=1.2.3.4, "
                "name=Unknown OID)>, value=b'derdata')>"
            )
        else:
            assert repr(gn) == (
                "<OtherName(type_id=<ObjectIdentifier(oid=1.2.3.4, "
                "name=Unknown OID)>, value='derdata')>"
            )
        # A known OID renders with its friendly name.
        gn = x509.OtherName(x509.ObjectIdentifier("2.5.4.65"), b"derdata")
        if six.PY3:
            assert repr(gn) == (
                "<OtherName(type_id=<ObjectIdentifier(oid=2.5.4.65, "
                "name=pseudonym)>, value=b'derdata')>"
            )
        else:
            assert repr(gn) == (
                "<OtherName(type_id=<ObjectIdentifier(oid=2.5.4.65, "
                "name=pseudonym)>, value='derdata')>"
            )
    def test_eq(self):
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        assert gn == gn2
    def test_ne(self):
        # Differs by value, by OID, and from unrelated types.
        gn = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata")
        assert gn != object()
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"), b"derdata2")
        assert gn != gn2
        gn2 = x509.OtherName(x509.ObjectIdentifier("1.2.3.5"), b"derdata")
        assert gn != gn2
class TestGeneralNames(object):
    """Tests for the GeneralNames sequence container."""
    def test_get_values_for_type(self):
        gns = x509.GeneralNames([x509.DNSName(u"cryptography.io")])
        assert gns.get_values_for_type(x509.DNSName) == [u"cryptography.io"]
    def test_iter_names(self):
        entries = [
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ]
        gns = x509.GeneralNames(entries)
        assert len(gns) == 2
        assert list(gns) == entries
    def test_iter_input(self):
        # The constructor must materialize a generator argument.
        entries = [
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ]
        gns = x509.GeneralNames(iter(entries))
        assert list(gns) == entries
    def test_indexing(self):
        # Negative indices and extended slices are supported.
        gns = x509.GeneralNames([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
            x509.DNSName(u"another.local"),
            x509.RFC822Name(u"email@another.local"),
            x509.UniformResourceIdentifier(u"http://another.local"),
        ])
        assert gns[-1] == gns[4]
        assert gns[2:6:2] == [gns[2], gns[4]]
    def test_invalid_general_names(self):
        # Plain strings are not GeneralName instances.
        with pytest.raises(TypeError):
            x509.GeneralNames([x509.DNSName(u"cryptography.io"), "invalid"])
    def test_repr(self):
        gns = x509.GeneralNames([x509.DNSName(u"cryptography.io")])
        assert repr(gns) == (
            "<GeneralNames([<DNSName(value=cryptography.io)>])>"
        )
    def test_eq(self):
        gns1 = x509.GeneralNames([x509.DNSName(u"cryptography.io")])
        gns2 = x509.GeneralNames([x509.DNSName(u"cryptography.io")])
        assert gns1 == gns2
    def test_ne(self):
        gns1 = x509.GeneralNames([x509.DNSName(u"cryptography.io")])
        gns2 = x509.GeneralNames([x509.RFC822Name(u"admin@cryptography.io")])
        assert gns1 != gns2
        assert gns1 != object()
class TestIssuerAlternativeName(object):
    """Unit tests for the IssuerAlternativeName extension wrapper around
    GeneralNames.
    """
    def test_get_values_for_type(self):
        san = x509.IssuerAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        names = san.get_values_for_type(x509.DNSName)
        assert names == [u"cryptography.io"]
    def test_iter_names(self):
        san = x509.IssuerAlternativeName([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ])
        assert len(san) == 2
        assert list(san) == [
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ]
    def test_indexing(self):
        # Negative indices and extended slices delegate to GeneralNames.
        ian = x509.IssuerAlternativeName([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
            x509.DNSName(u"another.local"),
            x509.RFC822Name(u"email@another.local"),
            x509.UniformResourceIdentifier(u"http://another.local"),
        ])
        assert ian[-1] == ian[4]
        assert ian[2:6:2] == [ian[2], ian[4]]
    def test_invalid_general_names(self):
        with pytest.raises(TypeError):
            x509.IssuerAlternativeName(
                [x509.DNSName(u"cryptography.io"), "invalid"]
            )
    def test_repr(self):
        san = x509.IssuerAlternativeName(
            [
                x509.DNSName(u"cryptography.io")
            ]
        )
        assert repr(san) == (
            "<IssuerAlternativeName("
            "<GeneralNames([<DNSName(value=cryptography.io)>])>)>"
        )
    def test_eq(self):
        san = x509.IssuerAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        san2 = x509.IssuerAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        assert san == san2
    def test_ne(self):
        san = x509.IssuerAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        san2 = x509.IssuerAlternativeName(
            [x509.RFC822Name(u"admin@cryptography.io")]
        )
        assert san != san2
        assert san != object()
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestRSAIssuerAlternativeNameExtension(object):
    """Tests parsing IssuerAlternativeName from a fixture certificate."""
    def test_uri(self, backend):
        cert = _load_cert(
            os.path.join("x509", "custom", "ian_uri.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.ISSUER_ALTERNATIVE_NAME
        )
        # The fixture's IAN carries exactly one URI general name.
        assert list(ext.value) == [
            x509.UniformResourceIdentifier(u"http://path.to.root/root.crt"),
        ]
class TestCRLNumber(object):
    """Tests for the CRLNumber extension type."""
    def test_eq(self):
        assert x509.CRLNumber(15) == x509.CRLNumber(15)
    def test_ne(self):
        crl_number = x509.CRLNumber(15)
        assert crl_number != x509.CRLNumber(14)
        assert crl_number != object()
    def test_repr(self):
        assert repr(x509.CRLNumber(15)) == "<CRLNumber(15)>"
    def test_invalid_number(self):
        # The CRL number must be an integer.
        with pytest.raises(TypeError):
            x509.CRLNumber("notanumber")
    def test_hash(self):
        number1 = x509.CRLNumber(1)
        number2 = x509.CRLNumber(1)
        number3 = x509.CRLNumber(2)
        assert hash(number1) == hash(number2)
        assert hash(number1) != hash(number3)
class TestSubjectAlternativeName(object):
    """Unit tests for the SubjectAlternativeName extension wrapper around
    GeneralNames.
    """
    def test_get_values_for_type(self):
        san = x509.SubjectAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        names = san.get_values_for_type(x509.DNSName)
        assert names == [u"cryptography.io"]
    def test_iter_names(self):
        san = x509.SubjectAlternativeName([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ])
        assert len(san) == 2
        assert list(san) == [
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
        ]
    def test_indexing(self):
        # Negative indices and extended slices delegate to GeneralNames.
        san = x509.SubjectAlternativeName([
            x509.DNSName(u"cryptography.io"),
            x509.DNSName(u"crypto.local"),
            x509.DNSName(u"another.local"),
            x509.RFC822Name(u"email@another.local"),
            x509.UniformResourceIdentifier(u"http://another.local"),
        ])
        assert san[-1] == san[4]
        assert san[2:6:2] == [san[2], san[4]]
    def test_invalid_general_names(self):
        with pytest.raises(TypeError):
            x509.SubjectAlternativeName(
                [x509.DNSName(u"cryptography.io"), "invalid"]
            )
    def test_repr(self):
        san = x509.SubjectAlternativeName(
            [
                x509.DNSName(u"cryptography.io")
            ]
        )
        assert repr(san) == (
            "<SubjectAlternativeName("
            "<GeneralNames([<DNSName(value=cryptography.io)>])>)>"
        )
    def test_eq(self):
        san = x509.SubjectAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        san2 = x509.SubjectAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        assert san == san2
    def test_ne(self):
        san = x509.SubjectAlternativeName(
            [x509.DNSName(u"cryptography.io")]
        )
        san2 = x509.SubjectAlternativeName(
            [x509.RFC822Name(u"admin@cryptography.io")]
        )
        assert san != san2
        assert san != object()
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestRSASubjectAlternativeNameExtension(object):
    """Tests parsing SubjectAlternativeName entries of every general-name
    kind from fixture certificates.

    NOTE(review): this class continues past the end of this chunk
    (test_certbuilder and possibly more methods follow).
    """
    def test_dns_name(self, backend):
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        dns = san.get_values_for_type(x509.DNSName)
        assert dns == [u"www.cryptography.io", u"cryptography.io"]
    def test_wildcard_dns_name(self, backend):
        # Wildcard labels are preserved verbatim.
        cert = _load_cert(
            os.path.join("x509", "wildcard_san.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = ext.value.get_values_for_type(x509.DNSName)
        assert dns == [
            u'*.langui.sh',
            u'langui.sh',
            u'*.saseliminator.com',
            u'saseliminator.com'
        ]
    def test_san_empty_hostname(self, backend):
        # An empty dNSName entry parses as an empty string, not an error.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_empty_hostname.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        san = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = san.value.get_values_for_type(x509.DNSName)
        assert dns == [u'']
    def test_san_wildcard_idna_dns_name(self, backend):
        # IDNA names behind a wildcard are decoded back to Unicode.
        cert = _load_cert(
            os.path.join("x509", "custom", "san_wildcard_idna.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        dns = ext.value.get_values_for_type(x509.DNSName)
        assert dns == [u'*.\u043f\u044b\u043a\u0430.cryptography']
    def test_unsupported_gn(self, backend):
        # x400Address (general-name tag 3) is not supported.
        cert = _load_cert(
            os.path.join("x509", "san_x400address.der"),
            x509.load_der_x509_certificate,
            backend
        )
        with pytest.raises(x509.UnsupportedGeneralNameType) as exc:
            cert.extensions
        assert exc.value.type == 3
    def test_registered_id(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_registered_id.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rid = san.get_values_for_type(x509.RegisteredID)
        assert rid == [x509.ObjectIdentifier("1.2.3.4")]
    def test_uri(self, backend):
        # URI entries: IDNA hosts decoded, port/path/query preserved.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_uri_with_port.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        uri = ext.value.get_values_for_type(
            x509.UniformResourceIdentifier
        )
        assert uri == [
            u"gopher://\u043f\u044b\u043a\u0430.cryptography:70/path?q=s#hel"
            u"lo",
            u"http://someregulardomain.com",
        ]
    def test_ipaddress(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_ipaddr.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        ip = san.get_values_for_type(x509.IPAddress)
        assert [
            ipaddress.ip_address(u"127.0.0.1"),
            ipaddress.ip_address(u"ff::")
        ] == ip
    def test_dirname(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_dirname.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        dirname = san.get_values_for_type(x509.DirectoryName)
        assert [
            x509.Name([
                x509.NameAttribute(NameOID.COMMON_NAME, u'test'),
                x509.NameAttribute(NameOID.ORGANIZATION_NAME, u'Org'),
                x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, u'Texas'),
            ])
        ] == dirname
    def test_rfc822name(self, backend):
        # IDNA-encoded email domains are decoded back to Unicode.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_rfc822_idna.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rfc822name = san.get_values_for_type(x509.RFC822Name)
        assert [u"email@em\xe5\xefl.com"] == rfc822name
    def test_idna2003_invalid(self, backend):
        # IDNA 2003 encodings are rejected; only IDNA 2008 is supported.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_idna2003_dnsname.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(UnicodeError):
            cert.extensions
    def test_unicode_rfc822_name_dns_name_uri(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_idna_names.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        rfc822_name = ext.value.get_values_for_type(x509.RFC822Name)
        dns_name = ext.value.get_values_for_type(x509.DNSName)
        uri = ext.value.get_values_for_type(x509.UniformResourceIdentifier)
        assert rfc822_name == [u"email@\u043f\u044b\u043a\u0430.cryptography"]
        assert dns_name == [u"\u043f\u044b\u043a\u0430.cryptography"]
        assert uri == [u"https://www.\u043f\u044b\u043a\u0430.cryptography"]
    def test_rfc822name_dnsname_ipaddress_directoryname_uri(self, backend):
        # A mixed SAN: every supported general-name type in one extension.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_email_dns_ip_dirname_uri.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        san = ext.value
        rfc822_name = san.get_values_for_type(x509.RFC822Name)
        uri = san.get_values_for_type(x509.UniformResourceIdentifier)
        dns = san.get_values_for_type(x509.DNSName)
        ip = san.get_values_for_type(x509.IPAddress)
        dirname = san.get_values_for_type(x509.DirectoryName)
        assert [u"user@cryptography.io"] == rfc822_name
        assert [u"https://cryptography.io"] == uri
        assert [u"cryptography.io"] == dns
        assert [
            x509.Name([
                x509.NameAttribute(NameOID.COMMON_NAME, u'dirCN'),
                x509.NameAttribute(
                    NameOID.ORGANIZATION_NAME, u'Cryptographic Authority'
                ),
            ])
        ] == dirname
        assert [
            ipaddress.ip_address(u"127.0.0.1"),
            ipaddress.ip_address(u"ff::")
        ] == ip
    def test_invalid_rfc822name(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_rfc822_names.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(ValueError) as exc:
            cert.extensions
        assert 'Invalid rfc822name value' in str(exc.value)
    def test_other_name(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "san_other_name.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.SUBJECT_ALTERNATIVE_NAME
        )
        assert ext is not None
        assert ext.critical is False
        # \x16\x0b is the DER header (IA5String, length 11) of the value.
        expected = x509.OtherName(x509.ObjectIdentifier("1.2.3.4"),
                                  b'\x16\x0bHello World')
        assert len(ext.value) == 1
        assert list(ext.value)[0] == expected
        othernames = ext.value.get_values_for_type(x509.OtherName)
        assert othernames == [expected]
def test_certbuilder(self, backend):
sans = [u'*.example.org', u'*.\xf5\xe4\xf6\xfc.example.com',
u'foobar.example.net']
private_key = RSA_KEY_2048.private_key(backend)
builder = _make_certbuilder(private_key)
builder = builder.add_extension(
SubjectAlternativeName(list(map(DNSName, sans))), True)
cert = builder.sign(private_key, hashes.SHA1(), backend)
result = [x.value for x in cert.extensions.get_extension_for_class(
SubjectAlternativeName).value]
assert result == sans
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestExtendedKeyUsageExtension(object):
    """Parsing tests for the extendedKeyUsage certificate extension."""

    def test_eku(self, backend):
        """Every EKU OID in the fixture is decoded, preserving order."""
        path = os.path.join("x509", "custom", "extended_key_usage.pem")
        cert = _load_cert(path, x509.load_pem_x509_certificate, backend)
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.EXTENDED_KEY_USAGE
        )
        assert ext is not None
        assert ext.critical is False
        expected_oids = [
            "1.3.6.1.5.5.7.3.1",
            "1.3.6.1.5.5.7.3.2",
            "1.3.6.1.5.5.7.3.3",
            "1.3.6.1.5.5.7.3.4",
            "1.3.6.1.5.5.7.3.9",
            "1.3.6.1.5.5.7.3.8",
            "2.5.29.37.0",
            "2.16.840.1.113730.4.1",
        ]
        assert list(ext.value) == [
            x509.ObjectIdentifier(dotted) for dotted in expected_oids
        ]
class TestAccessDescription(object):
    """Unit tests for x509.AccessDescription construction and semantics."""
    def test_invalid_access_method(self):
        # access_method must be an ObjectIdentifier instance.
        with pytest.raises(TypeError):
            x509.AccessDescription("notanoid", x509.DNSName(u"test"))
    def test_invalid_access_location(self):
        # access_location must be a GeneralName instance.
        with pytest.raises(TypeError):
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS, "invalid"
            )
    def test_valid_nonstandard_method(self):
        # Arbitrary (non-RFC 5280) OIDs are accepted as access methods.
        ad = x509.AccessDescription(
            ObjectIdentifier("2.999.1"),
            x509.UniformResourceIdentifier(u"http://example.com")
        )
        assert ad is not None
    def test_repr(self):
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        assert repr(ad) == (
            "<AccessDescription(access_method=<ObjectIdentifier(oid=1.3.6.1.5."
            "5.7.48.1, name=OCSP)>, access_location=<UniformResourceIdentifier"
            "(value=http://ocsp.domain.com)>)>"
        )
    def test_eq(self):
        # Two distinct instances with equal fields must compare equal
        # (value semantics, not identity).
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        assert ad == ad2
    def test_ne(self):
        # Inequality when either the access method or location differs,
        # and against unrelated types.
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.CA_ISSUERS,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        ad3 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://notthesame")
        )
        assert ad != ad2
        assert ad != ad3
        assert ad != object()
    def test_hash(self):
        # Hash must agree with equality: equal values hash equal, and the
        # separately-constructed instances here verify that deliberately.
        ad = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        ad2 = x509.AccessDescription(
            AuthorityInformationAccessOID.OCSP,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        ad3 = x509.AccessDescription(
            AuthorityInformationAccessOID.CA_ISSUERS,
            x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
        )
        assert hash(ad) == hash(ad2)
        assert hash(ad) != hash(ad3)
class TestPolicyConstraints(object):
    """Unit tests for the x509.PolicyConstraints extension value type."""

    def test_invalid_explicit_policy(self):
        # require_explicit_policy must be an integer (or None).
        with pytest.raises(TypeError):
            x509.PolicyConstraints("invalid", None)

    def test_invalid_inhibit_policy(self):
        # inhibit_policy_mapping must be an integer (or None).
        with pytest.raises(TypeError):
            x509.PolicyConstraints(None, "invalid")

    def test_both_none(self):
        # At least one of the two fields must be set.
        with pytest.raises(ValueError):
            x509.PolicyConstraints(None, None)

    def test_repr(self):
        pc = x509.PolicyConstraints(0, None)
        expected_repr = (
            u"<PolicyConstraints(require_explicit_policy=0, inhibit_policy_ma"
            u"pping=None)>"
        )
        assert repr(pc) == expected_repr

    def test_eq(self):
        assert x509.PolicyConstraints(2, 1) == x509.PolicyConstraints(2, 1)

    def test_ne(self):
        reference = x509.PolicyConstraints(2, 1)
        assert reference != x509.PolicyConstraints(2, 2)
        assert reference != x509.PolicyConstraints(3, 1)
        assert reference != object()
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestPolicyConstraintsExtension(object):
    """Parsing tests for the policyConstraints certificate extension."""

    def test_inhibit_policy_mapping(self, backend):
        path = os.path.join("x509", "department-of-state-root.pem")
        cert = _load_cert(path, x509.load_pem_x509_certificate, backend)
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.POLICY_CONSTRAINTS,
        )
        # RFC 5280 requires policyConstraints to be critical.
        assert ext.critical is True
        expected = x509.PolicyConstraints(
            require_explicit_policy=None, inhibit_policy_mapping=0,
        )
        assert ext.value == expected

    def test_require_explicit_policy(self, backend):
        path = os.path.join(
            "x509", "custom", "policy_constraints_explicit.pem"
        )
        cert = _load_cert(path, x509.load_pem_x509_certificate, backend)
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.POLICY_CONSTRAINTS
        )
        assert ext.critical is True
        expected = x509.PolicyConstraints(
            require_explicit_policy=1, inhibit_policy_mapping=None,
        )
        assert ext.value == expected
class TestAuthorityInformationAccess(object):
    """Unit tests for the x509.AuthorityInformationAccess container type."""
    def test_invalid_descriptions(self):
        # Every element must be an AccessDescription instance.
        with pytest.raises(TypeError):
            x509.AuthorityInformationAccess(["notanAccessDescription"])
    def test_iter_len(self):
        # The container supports len() and iterates in insertion order.
        aia = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ])
        assert len(aia) == 2
        assert list(aia) == [
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ]
    def test_iter_input(self):
        # The constructor accepts any iterable, not only lists.
        desc = [
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            )
        ]
        aia = x509.AuthorityInformationAccess(iter(desc))
        assert list(aia) == desc
    def test_repr(self):
        aia = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ])
        assert repr(aia) == (
            "<AuthorityInformationAccess([<AccessDescription(access_method=<Ob"
            "jectIdentifier(oid=1.3.6.1.5.5.7.48.1, name=OCSP)>, access_locati"
            "on=<UniformResourceIdentifier(value=http://ocsp.domain.com)>)>, <"
            "AccessDescription(access_method=<ObjectIdentifier(oid=1.3.6.1.5.5"
            ".7.48.2, name=caIssuers)>, access_location=<UniformResourceIdenti"
            "fier(value=http://domain.com/ca.crt)>)>])>"
        )
    def test_eq(self):
        # Separately constructed but identical containers compare equal.
        aia = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ])
        aia2 = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ])
        assert aia == aia2
    def test_ne(self):
        # Containers differing in length/content are unequal; so is any
        # unrelated type.
        aia = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            )
        ])
        aia2 = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
        ])
        assert aia != aia2
        assert aia != object()
    def test_indexing(self):
        # Supports negative indices and extended slices like a sequence.
        aia = x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://domain.com/ca.crt")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp2.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp3.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp4.domain.com")
            ),
        ])
        assert aia[-1] == aia[4]
        assert aia[2:6:2] == [aia[2], aia[4]]
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestAuthorityInformationAccessExtension(object):
    """Parsing tests for the authorityInfoAccess certificate extension."""
    def test_aia_ocsp_ca_issuers(self, backend):
        # A real-world cert with one OCSP and one caIssuers URI.
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://gv.symcd.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.UniformResourceIdentifier(u"http://gv.symcb.com/gv.crt")
            ),
        ])
    def test_aia_multiple_ocsp_ca_issuers(self, backend):
        # Multiple OCSP entries plus a directoryName caIssuers entry.
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ocsp_ca_issuers.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp2.domain.com")
            ),
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.DirectoryName(x509.Name([
                    x509.NameAttribute(NameOID.COMMON_NAME, u"myCN"),
                    x509.NameAttribute(NameOID.ORGANIZATION_NAME,
                                       u"some Org"),
                ]))
            ),
        ])
    def test_aia_ocsp_only(self, backend):
        # An AIA extension containing only an OCSP responder URI.
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ocsp.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.OCSP,
                x509.UniformResourceIdentifier(u"http://ocsp.domain.com")
            ),
        ])
    def test_aia_ca_issuers_only(self, backend):
        # An AIA extension containing only a caIssuers directoryName.
        cert = _load_cert(
            os.path.join("x509", "custom", "aia_ca_issuers.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_INFORMATION_ACCESS
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value == x509.AuthorityInformationAccess([
            x509.AccessDescription(
                AuthorityInformationAccessOID.CA_ISSUERS,
                x509.DirectoryName(x509.Name([
                    x509.NameAttribute(NameOID.COMMON_NAME, u"myCN"),
                    x509.NameAttribute(NameOID.ORGANIZATION_NAME,
                                       u"some Org"),
                ]))
            ),
        ])
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestAuthorityKeyIdentifierExtension(object):
    """Parsing tests for the authorityKeyIdentifier certificate extension."""
    def test_aki_keyid(self, backend):
        # keyIdentifier only; issuer and serial are absent.
        cert = _load_cert(
            os.path.join(
                "x509", "cryptography.io.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier == (
            b"\xc3\x9c\xf3\xfc\xd3F\x084\xbb\xceF\x7f\xa0|[\xf3\xe2\x08\xcbY"
        )
        assert ext.value.authority_cert_issuer is None
        assert ext.value.authority_cert_serial_number is None
    def test_aki_all_fields(self, backend):
        # keyIdentifier, authorityCertIssuer and authorityCertSerialNumber
        # all present.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "authority_key_identifier.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier == (
            b"9E>\xca=b\x1d\xea\x86I\xf6Z\xab@\xb7\xa4p\x98\xf1\xec"
        )
        assert ext.value.authority_cert_issuer == [
            x509.DirectoryName(
                x509.Name([
                    x509.NameAttribute(
                        NameOID.ORGANIZATION_NAME, u"PyCA"
                    ),
                    x509.NameAttribute(
                        NameOID.COMMON_NAME, u"cryptography.io"
                    )
                ])
            )
        ]
        assert ext.value.authority_cert_serial_number == 3
    def test_aki_no_keyid(self, backend):
        # Issuer and serial present but no keyIdentifier.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "authority_key_identifier_no_keyid.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        assert ext is not None
        assert ext.critical is False
        assert ext.value.key_identifier is None
        assert ext.value.authority_cert_issuer == [
            x509.DirectoryName(
                x509.Name([
                    x509.NameAttribute(
                        NameOID.ORGANIZATION_NAME, u"PyCA"
                    ),
                    x509.NameAttribute(
                        NameOID.COMMON_NAME, u"cryptography.io"
                    )
                ])
            )
        ]
        assert ext.value.authority_cert_serial_number == 3
    def test_from_certificate(self, backend):
        # AKI derived from the issuer's public key must match the one
        # embedded in the issued certificate.
        issuer_cert = _load_cert(
            os.path.join("x509", "rapidssl_sha256_ca_g3.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(
            issuer_cert.public_key()
        )
        assert ext.value == aki
    def test_from_issuer_subject_key_identifier(self, backend):
        # AKI derived from the issuer's SKI extension must match as well.
        issuer_cert = _load_cert(
            os.path.join("x509", "rapidssl_sha256_ca_g3.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        cert = _load_cert(
            os.path.join("x509", "cryptography.io.pem"),
            x509.load_pem_x509_certificate,
            backend
        )
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.AUTHORITY_KEY_IDENTIFIER
        )
        ski = issuer_cert.extensions.get_extension_for_class(
            x509.SubjectKeyIdentifier
        )
        aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
            ski
        )
        assert ext.value == aki
class TestNameConstraints(object):
    """Unit tests for the x509.NameConstraints extension value type."""
    def test_ipaddress_wrong_type(self):
        # IPAddress subtrees must wrap networks, not single addresses.
        with pytest.raises(TypeError):
            x509.NameConstraints(
                permitted_subtrees=[
                    x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.1"))
                ],
                excluded_subtrees=None
            )
        with pytest.raises(TypeError):
            x509.NameConstraints(
                permitted_subtrees=None,
                excluded_subtrees=[
                    x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.1"))
                ]
            )
    def test_ipaddress_allowed_type(self):
        # IPv4Network values are accepted in both subtree lists.
        permitted = [x509.IPAddress(ipaddress.IPv4Network(u"192.168.0.0/29"))]
        excluded = [x509.IPAddress(ipaddress.IPv4Network(u"10.10.0.0/24"))]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted,
            excluded_subtrees=excluded
        )
        assert nc.permitted_subtrees == permitted
        assert nc.excluded_subtrees == excluded
    def test_invalid_permitted_subtrees(self):
        # Subtrees must be an iterable of GeneralName, not a string.
        with pytest.raises(TypeError):
            x509.NameConstraints("badpermitted", None)
    def test_invalid_excluded_subtrees(self):
        with pytest.raises(TypeError):
            x509.NameConstraints(None, "badexcluded")
    def test_no_subtrees(self):
        # At least one subtree list must be provided.
        with pytest.raises(ValueError):
            x509.NameConstraints(None, None)
    def test_permitted_none(self):
        excluded = [x509.DNSName(u"name.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=None, excluded_subtrees=excluded
        )
        assert nc.permitted_subtrees is None
        assert nc.excluded_subtrees is not None
    def test_excluded_none(self):
        permitted = [x509.DNSName(u"name.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted, excluded_subtrees=None
        )
        assert nc.permitted_subtrees is not None
        assert nc.excluded_subtrees is None
    def test_iter_input(self):
        # The constructor accepts arbitrary iterables for both lists.
        subtrees = [x509.IPAddress(ipaddress.IPv4Network(u"192.168.0.0/24"))]
        nc = x509.NameConstraints(iter(subtrees), iter(subtrees))
        assert list(nc.permitted_subtrees) == subtrees
        assert list(nc.excluded_subtrees) == subtrees
    def test_repr(self):
        permitted = [x509.DNSName(u"name.local"), x509.DNSName(u"name2.local")]
        nc = x509.NameConstraints(
            permitted_subtrees=permitted,
            excluded_subtrees=None
        )
        assert repr(nc) == (
            "<NameConstraints(permitted_subtrees=[<DNSName(value=name.local)>"
            ", <DNSName(value=name2.local)>], excluded_subtrees=None)>"
        )
    def test_eq(self):
        # Separately constructed but identical instances compare equal.
        nc = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName(u"name.local")],
            excluded_subtrees=[x509.DNSName(u"name2.local")]
        )
        nc2 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName(u"name.local")],
            excluded_subtrees=[x509.DNSName(u"name2.local")]
        )
        assert nc == nc2
    def test_ne(self):
        nc = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName(u"name.local")],
            excluded_subtrees=[x509.DNSName(u"name2.local")]
        )
        nc2 = x509.NameConstraints(
            permitted_subtrees=[x509.DNSName(u"name.local")],
            excluded_subtrees=None
        )
        nc3 = x509.NameConstraints(
            permitted_subtrees=None,
            excluded_subtrees=[x509.DNSName(u"name2.local")]
        )
        assert nc != nc2
        assert nc != nc3
        assert nc != object()
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestNameConstraintsExtension(object):
    """Parsing and round-trip tests for the nameConstraints extension."""
    def test_permitted_excluded(self, backend):
        # Both a permitted dNSName and an excluded directoryName subtree.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_permitted_excluded_2.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.DNSName(u"zombo.local"),
            ],
            excluded_subtrees=[
                x509.DirectoryName(x509.Name([
                    x509.NameAttribute(NameOID.COMMON_NAME, u"zombo")
                ]))
            ]
        )
    def test_permitted(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_permitted_2.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.DNSName(u"zombo.local"),
            ],
            excluded_subtrees=None
        )
    def test_permitted_with_leading_period(self, backend):
        # Leading-dot DNS constraints are preserved verbatim.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_permitted.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.DNSName(u".cryptography.io"),
                x509.UniformResourceIdentifier(u"ftp://cryptography.test")
            ],
            excluded_subtrees=None
        )
    def test_excluded_with_leading_period(self, backend):
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_excluded.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=None,
            excluded_subtrees=[
                x509.DNSName(u".cryptography.io"),
                x509.UniformResourceIdentifier(u"gopher://cryptography.test")
            ]
        )
    def test_permitted_excluded_with_ips(self, backend):
        # IP network subtrees parse to ipaddress network objects.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_permitted_excluded.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.IPAddress(ipaddress.IPv4Network(u"192.168.0.0/24")),
                x509.IPAddress(ipaddress.IPv6Network(u"FF:0:0:0:0:0:0:0/96")),
            ],
            excluded_subtrees=[
                x509.DNSName(u".domain.com"),
                x509.UniformResourceIdentifier(u"http://test.local"),
            ]
        )
    def test_single_ip_netmask(self, backend):
        # /128 and /32 (single-host) netmasks are valid constraints.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_single_ip_netmask.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        nc = cert.extensions.get_extension_for_oid(
            ExtensionOID.NAME_CONSTRAINTS
        ).value
        assert nc == x509.NameConstraints(
            permitted_subtrees=[
                x509.IPAddress(ipaddress.IPv6Network(u"FF:0:0:0:0:0:0:0/128")),
                x509.IPAddress(ipaddress.IPv4Network(u"192.168.0.1/32")),
            ],
            excluded_subtrees=None
        )
    def test_invalid_netmask(self, backend):
        # A non-contiguous netmask in the certificate must be rejected.
        cert = _load_cert(
            os.path.join(
                "x509", "custom", "nc_invalid_ip_netmask.pem"
            ),
            x509.load_pem_x509_certificate,
            backend
        )
        with pytest.raises(ValueError):
            cert.extensions.get_extension_for_oid(
                ExtensionOID.NAME_CONSTRAINTS
            )
    def test_certbuilder(self, backend):
        # Name constraints survive a build/sign/parse round trip,
        # including IDNA names.
        permitted = [u'.example.org', u'.\xf5\xe4\xf6\xfc.example.com',
                     u'foobar.example.net']
        private_key = RSA_KEY_2048.private_key(backend)
        builder = _make_certbuilder(private_key)
        builder = builder.add_extension(
            NameConstraints(permitted_subtrees=list(map(DNSName, permitted)),
                            excluded_subtrees=[]), True)
        cert = builder.sign(private_key, hashes.SHA1(), backend)
        result = [x.value for x in cert.extensions.get_extension_for_class(
            NameConstraints).value.permitted_subtrees]
        assert result == permitted
class TestDistributionPoint(object):
    """Unit tests for the x509.DistributionPoint value type."""
    def test_distribution_point_full_name_not_general_names(self):
        # full_name elements must be GeneralName instances.
        with pytest.raises(TypeError):
            x509.DistributionPoint(["notgn"], None, None, None)
    def test_distribution_point_relative_name_not_name(self):
        # relative_name must be a RelativeDistinguishedName.
        with pytest.raises(TypeError):
            x509.DistributionPoint(None, "notname", None, None)
    def test_distribution_point_full_and_relative_not_none(self):
        # full_name and relative_name are mutually exclusive.
        with pytest.raises(ValueError):
            x509.DistributionPoint("data", "notname", None, None)
    def test_relative_name_name_value_deprecated(self):
        # Passing an x509.Name as relative_name is deprecated in favor of
        # RelativeDistinguishedName.
        with pytest.deprecated_call():
            x509.DistributionPoint(
                None,
                x509.Name([
                    x509.NameAttribute(NameOID.COMMON_NAME, u"myCN")
                ]),
                None,
                None
            )
    def test_crl_issuer_not_general_names(self):
        with pytest.raises(TypeError):
            x509.DistributionPoint(None, None, None, ["notgn"])
    def test_reason_not_reasonflags(self):
        # Each reason must be a ReasonFlags member.
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
                None,
                frozenset(["notreasonflags"]),
                None
            )
    def test_reason_not_frozenset(self):
        # reasons must be a frozenset, not a plain list.
        with pytest.raises(TypeError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
                None,
                [x509.ReasonFlags.ca_compromise],
                None
            )
    def test_disallowed_reasons(self):
        # RFC 5280 forbids unspecified and removeFromCRL in a DP.
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
                None,
                frozenset([x509.ReasonFlags.unspecified]),
                None
            )
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
                None,
                frozenset([x509.ReasonFlags.remove_from_crl]),
                None
            )
    def test_reason_only(self):
        # reasons alone are invalid: a name or crl_issuer must accompany
        # them.
        with pytest.raises(ValueError):
            x509.DistributionPoint(
                None,
                None,
                frozenset([x509.ReasonFlags.aa_compromise]),
                None
            )
    def test_eq(self):
        # Distinct instances with equal fields compare equal.
        dp = x509.DistributionPoint(
            [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, u"Important CA"
                        )
                    ])
                )
            ],
        )
        dp2 = x509.DistributionPoint(
            [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, u"Important CA"
                        )
                    ])
                )
            ],
        )
        assert dp == dp2
    def test_ne(self):
        dp = x509.DistributionPoint(
            [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
            None,
            frozenset([x509.ReasonFlags.superseded]),
            [
                x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, u"Important CA"
                        )
                    ])
                )
            ],
        )
        dp2 = x509.DistributionPoint(
            [x509.UniformResourceIdentifier(u"http://crypt.og/crl")],
            None,
            None,
            None
        )
        assert dp != dp2
        assert dp != object()
    def test_iter_input(self):
        # full_name and crl_issuer accept arbitrary iterables.
        name = [x509.UniformResourceIdentifier(u"http://crypt.og/crl")]
        issuer = [
            x509.DirectoryName(
                x509.Name([
                    x509.NameAttribute(NameOID.COMMON_NAME, u"Important CA")
                ])
            )
        ]
        dp = x509.DistributionPoint(
            iter(name),
            None,
            frozenset([x509.ReasonFlags.ca_compromise]),
            iter(issuer),
        )
        assert list(dp.full_name) == name
        assert list(dp.crl_issuer) == issuer
    def test_repr(self):
        dp = x509.DistributionPoint(
            None,
            x509.RelativeDistinguishedName([
                x509.NameAttribute(NameOID.COMMON_NAME, u"myCN")
            ]),
            frozenset([x509.ReasonFlags.ca_compromise]),
            [
                x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(
                            NameOID.COMMON_NAME, u"Important CA"
                        )
                    ])
                )
            ],
        )
        # repr differs between Python 2 and 3 (unicode prefix, frozenset
        # literal syntax), so both forms are pinned.
        if six.PY3:
            assert repr(dp) == (
                "<DistributionPoint(full_name=None, relative_name=<RelativeDis"
                "tinguishedName([<NameAttribute(oid=<ObjectIdentifier(oid=2.5."
                "4.3, name=commonName)>, value='myCN')>])>, reasons=frozenset("
                "{<ReasonFlags.ca_compromise: 'cACompromise'>}), crl_issuer=[<"
                "DirectoryName(value=<Name([<NameAttribute(oid=<ObjectIdentifi"
                "er(oid=2.5.4.3, name=commonName)>, value='Important CA')>])>)"
                ">])>"
            )
        else:
            assert repr(dp) == (
                "<DistributionPoint(full_name=None, relative_name=<RelativeDis"
                "tinguishedName([<NameAttribute(oid=<ObjectIdentifier(oid=2.5."
                "4.3, name=commonName)>, value=u'myCN')>])>, reasons=frozenset"
                "([<ReasonFlags.ca_compromise: 'cACompromise'>]), crl_issuer=["
                "<DirectoryName(value=<Name([<NameAttribute(oid=<ObjectIdentif"
                "ier(oid=2.5.4.3, name=commonName)>, value=u'Important CA')>])"
                ">)>])>"
            )
class TestCRLDistributionPoints(object):
    """Unit tests for the x509.CRLDistributionPoints container type."""
    def test_invalid_distribution_points(self):
        # Elements must be DistributionPoint instances.
        with pytest.raises(TypeError):
            x509.CRLDistributionPoints(["notadistributionpoint"])
    def test_iter_len(self):
        # The container supports len() and iterates in insertion order.
        cdp = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://domain")],
                None,
                None,
                None
            ),
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                None
            ),
        ])
        assert len(cdp) == 2
        assert list(cdp) == [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://domain")],
                None,
                None,
                None
            ),
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                None
            ),
        ]
    def test_iter_input(self):
        # The constructor accepts any iterable, not just lists.
        points = [
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"http://domain")],
                None,
                None,
                None
            ),
        ]
        cdp = x509.CRLDistributionPoints(iter(points))
        assert list(cdp) == points
    def test_repr(self):
        cdp = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([x509.ReasonFlags.key_compromise]),
                None
            ),
        ])
        # frozenset repr syntax differs between Python 2 and 3.
        if six.PY3:
            assert repr(cdp) == (
                "<CRLDistributionPoints([<DistributionPoint(full_name=[<Unifo"
                "rmResourceIdentifier(value=ftp://domain)>], relative_name=No"
                "ne, reasons=frozenset({<ReasonFlags.key_compromise: 'keyComp"
                "romise'>}), crl_issuer=None)>])>"
            )
        else:
            assert repr(cdp) == (
                "<CRLDistributionPoints([<DistributionPoint(full_name=[<Unifo"
                "rmResourceIdentifier(value=ftp://domain)>], relative_name=No"
                "ne, reasons=frozenset([<ReasonFlags.key_compromise: 'keyComp"
                "romise'>]), crl_issuer=None)>])>"
            )
    def test_eq(self):
        # Separately constructed but identical containers compare equal.
        cdp = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
        ])
        cdp2 = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
        ])
        assert cdp == cdp2
    def test_ne(self):
        # A difference in any field of any point makes containers unequal.
        cdp = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
        ])
        cdp2 = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain2")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
        ])
        cdp3 = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([x509.ReasonFlags.key_compromise]),
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
        ])
        cdp4 = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                [x509.UniformResourceIdentifier(u"ftp://domain")],
                None,
                frozenset([
                    x509.ReasonFlags.key_compromise,
                    x509.ReasonFlags.ca_compromise,
                ]),
                [x509.UniformResourceIdentifier(u"uri://thing2")],
            ),
        ])
        assert cdp != cdp2
        assert cdp != cdp3
        assert cdp != cdp4
        assert cdp != object()
    def test_indexing(self):
        # Supports negative indices and extended slices like a sequence.
        ci = x509.CRLDistributionPoints([
            x509.DistributionPoint(
                None, None, None,
                [x509.UniformResourceIdentifier(u"uri://thing")],
            ),
            x509.DistributionPoint(
                None, None, None,
                [x509.UniformResourceIdentifier(u"uri://thing2")],
            ),
            x509.DistributionPoint(
                None, None, None,
                [x509.UniformResourceIdentifier(u"uri://thing3")],
            ),
            x509.DistributionPoint(
                None, None, None,
                [x509.UniformResourceIdentifier(u"uri://thing4")],
            ),
            x509.DistributionPoint(
                None, None, None,
                [x509.UniformResourceIdentifier(u"uri://thing5")],
            ),
        ])
        assert ci[-1] == ci[4]
        assert ci[2:6:2] == [ci[2], ci[4]]
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestCRLDistributionPointsExtension(object):
    def test_fullname_and_crl_issuer(self, backend):
        """A CDP whose fullName is a directoryName, plus a cRLIssuer."""
        cert = _load_cert(
            os.path.join(
                "x509", "PKITS_data", "certs", "ValidcRLIssuerTest28EE.crt"
            ),
            x509.load_der_x509_certificate,
            backend
        )
        cdps = cert.extensions.get_extension_for_oid(
            ExtensionOID.CRL_DISTRIBUTION_POINTS
        ).value
        assert cdps == x509.CRLDistributionPoints([
            x509.DistributionPoint(
                full_name=[x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
                        x509.NameAttribute(
                            NameOID.ORGANIZATION_NAME,
                            u"Test Certificates 2011"
                        ),
                        x509.NameAttribute(
                            NameOID.ORGANIZATIONAL_UNIT_NAME,
                            u"indirectCRL CA3 cRLIssuer"
                        ),
                        x509.NameAttribute(
                            NameOID.COMMON_NAME,
                            u"indirect CRL for indirectCRL CA3"
                        ),
                    ])
                )],
                relative_name=None,
                reasons=None,
                crl_issuer=[x509.DirectoryName(
                    x509.Name([
                        x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
                        x509.NameAttribute(
                            NameOID.ORGANIZATION_NAME,
                            u"Test Certificates 2011"
                        ),
                        x509.NameAttribute(
                            NameOID.ORGANIZATIONAL_UNIT_NAME,
                            u"indirectCRL CA3 cRLIssuer"
                        ),
                    ])
                )],
            )
        ])
def test_relativename_and_crl_issuer(self, backend):
cert = _load_cert(
os.path.join(
"x509", "PKITS_data", "certs", "ValidcRLIssuerTest29EE.crt"
),
x509.load_der_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=None,
relative_name=x509.RelativeDistinguishedName([
x509.NameAttribute(
NameOID.COMMON_NAME,
u"indirect CRL for indirectCRL CA3"
),
]),
reasons=None,
crl_issuer=[x509.DirectoryName(
x509.Name([
x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
x509.NameAttribute(
NameOID.ORGANIZATION_NAME,
u"Test Certificates 2011"
),
x509.NameAttribute(
NameOID.ORGANIZATIONAL_UNIT_NAME,
u"indirectCRL CA3 cRLIssuer"
),
])
)],
)
])
def test_fullname_crl_issuer_reasons(self, backend):
cert = _load_cert(
os.path.join(
"x509", "custom", "cdp_fullname_reasons_crl_issuer.pem"
),
x509.load_pem_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=[x509.UniformResourceIdentifier(
u"http://myhost.com/myca.crl"
)],
relative_name=None,
reasons=frozenset([
x509.ReasonFlags.key_compromise,
x509.ReasonFlags.ca_compromise
]),
crl_issuer=[x509.DirectoryName(
x509.Name([
x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
x509.NameAttribute(
NameOID.ORGANIZATION_NAME, u"PyCA"
),
x509.NameAttribute(
NameOID.COMMON_NAME, u"cryptography CA"
),
])
)],
)
])
def test_all_reasons(self, backend):
cert = _load_cert(
os.path.join(
"x509", "custom", "cdp_all_reasons.pem"
),
x509.load_pem_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=[x509.UniformResourceIdentifier(
u"http://domain.com/some.crl"
)],
relative_name=None,
reasons=frozenset([
x509.ReasonFlags.key_compromise,
x509.ReasonFlags.ca_compromise,
x509.ReasonFlags.affiliation_changed,
x509.ReasonFlags.superseded,
x509.ReasonFlags.privilege_withdrawn,
x509.ReasonFlags.cessation_of_operation,
x509.ReasonFlags.aa_compromise,
x509.ReasonFlags.certificate_hold,
]),
crl_issuer=None
)
])
def test_single_reason(self, backend):
cert = _load_cert(
os.path.join(
"x509", "custom", "cdp_reason_aa_compromise.pem"
),
x509.load_pem_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=[x509.UniformResourceIdentifier(
u"http://domain.com/some.crl"
)],
relative_name=None,
reasons=frozenset([x509.ReasonFlags.aa_compromise]),
crl_issuer=None
)
])
def test_crl_issuer_only(self, backend):
cert = _load_cert(
os.path.join(
"x509", "custom", "cdp_crl_issuer.pem"
),
x509.load_pem_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=None,
relative_name=None,
reasons=None,
crl_issuer=[x509.DirectoryName(
x509.Name([
x509.NameAttribute(
NameOID.COMMON_NAME, u"cryptography CA"
),
])
)],
)
])
def test_crl_empty_hostname(self, backend):
cert = _load_cert(
os.path.join(
"x509", "custom", "cdp_empty_hostname.pem"
),
x509.load_pem_x509_certificate,
backend
)
cdps = cert.extensions.get_extension_for_oid(
ExtensionOID.CRL_DISTRIBUTION_POINTS
).value
assert cdps == x509.CRLDistributionPoints([
x509.DistributionPoint(
full_name=[x509.UniformResourceIdentifier(
u"ldap:/CN=A,OU=B,dc=C,DC=D?E?F?G?H=I"
)],
relative_name=None,
reasons=None,
crl_issuer=None
)
])
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestOCSPNoCheckExtension(object):
    """Parsing test for the OCSP no-check certificate extension."""

    def test_nocheck(self, backend):
        certificate = _load_cert(
            os.path.join("x509", "custom", "ocsp_nocheck.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        extension = certificate.extensions.get_extension_for_oid(
            ExtensionOID.OCSP_NO_CHECK
        )
        # The extension carries no payload; it parses to a marker object.
        assert isinstance(extension.value, x509.OCSPNoCheck)
class TestInhibitAnyPolicy(object):
    """Unit tests for the x509.InhibitAnyPolicy extension type."""

    def test_not_int(self):
        # skip_certs must be an integer.
        with pytest.raises(TypeError):
            x509.InhibitAnyPolicy("notint")

    def test_negative_int(self):
        # Negative skip_certs values are rejected.
        with pytest.raises(ValueError):
            x509.InhibitAnyPolicy(-1)

    def test_repr(self):
        rendered = repr(x509.InhibitAnyPolicy(0))
        assert rendered == "<InhibitAnyPolicy(skip_certs=0)>"

    def test_eq(self):
        assert x509.InhibitAnyPolicy(1) == x509.InhibitAnyPolicy(1)

    def test_ne(self):
        one = x509.InhibitAnyPolicy(1)
        assert one != x509.InhibitAnyPolicy(4)
        assert one != object()

    def test_hash(self):
        first, second, third = (
            x509.InhibitAnyPolicy(n) for n in (1, 1, 4)
        )
        assert hash(first) == hash(second)
        assert hash(first) != hash(third)
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestInhibitAnyPolicyExtension(object):
    """Parsing test for the inhibit anyPolicy certificate extension."""

    def test_nocheck(self, backend):
        certificate = _load_cert(
            os.path.join("x509", "custom", "inhibit_any_policy_5.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        extension_value = certificate.extensions.get_extension_for_oid(
            ExtensionOID.INHIBIT_ANY_POLICY
        ).value
        # The fixture certificate encodes a skip_certs count of 5.
        assert extension_value.skip_certs == 5
@pytest.mark.requires_backend_interface(interface=RSABackend)
@pytest.mark.requires_backend_interface(interface=X509Backend)
class TestInvalidExtension(object):
    """Certificates with malformed extension payloads must fail to parse."""

    def test_invalid_certificate_policies_data(self, backend):
        certificate = _load_cert(
            os.path.join("x509", "custom", "cp_invalid.pem"),
            x509.load_pem_x509_certificate,
            backend,
        )
        # Extension parsing is lazy: accessing .extensions triggers it.
        with pytest.raises(ValueError):
            certificate.extensions
| 34.25354 | 79 | 0.571797 |
1c877b91fbbaff27fa67c77410239a088bdb13a9 | 1,068 | py | Python | guillotina/auth/role.py | rboixaderg/guillotina | fcae65c2185222272f3b8fee4bc2754e81e0e983 | [
"BSD-2-Clause"
] | 173 | 2017-03-10T18:26:12.000Z | 2022-03-03T06:48:56.000Z | guillotina/auth/role.py | rboixaderg/guillotina | fcae65c2185222272f3b8fee4bc2754e81e0e983 | [
"BSD-2-Clause"
] | 921 | 2017-03-08T14:04:43.000Z | 2022-03-30T10:28:56.000Z | guillotina/auth/role.py | rboixaderg/guillotina | fcae65c2185222272f3b8fee4bc2754e81e0e983 | [
"BSD-2-Clause"
] | 60 | 2017-03-16T19:59:44.000Z | 2022-03-03T06:48:59.000Z | from guillotina._settings import app_settings
from guillotina.component import get_utilities_for
from guillotina.interfaces import IRole
from zope.interface import implementer
@implementer(IRole)
class Role(object):
    """A named security role implementing the ``IRole`` contract.

    ``local`` tells whether the role is locally scoped (the default) or
    global; the attribute names are part of the public interface.
    """

    def __init__(self, id, title, description="", local=True):
        self.id, self.title = id, title
        self.description, self.local = description, local
def check_role(context, role_id):
    """Raise ``ValueError`` if *role_id* is not a registered ``IRole``.

    The id is looked up among the ``IRole`` utilities available for
    *context*.
    """
    registered = {
        name for name, _utility in get_utilities_for(IRole, context)
    }
    if role_id not in registered:
        raise ValueError(f'Undefined role id "{role_id}"')
def local_roles():
    """Return the names of all locally scoped roles.

    The computed list is memoized in ``app_settings`` under the
    ``local_roles`` key so the utility registry is only scanned once.
    """
    try:
        return app_settings["local_roles"]
    except KeyError:
        names = [
            name for name, utility in get_utilities_for(IRole)
            if utility.local
        ]
        app_settings["local_roles"] = names
        return names
def global_roles():
    """Return the names of all globally scoped roles.

    Mirrors :func:`local_roles` but selects non-local roles, memoizing
    the result in ``app_settings`` under the ``global_roles`` key.
    """
    try:
        return app_settings["global_roles"]
    except KeyError:
        names = [
            name for name, utility in get_utilities_for(IRole)
            if not utility.local
        ]
        app_settings["global_roles"] = names
        return names
| 29.666667 | 79 | 0.712547 |
4b3e935426f9f1d79b91b7301b5cd6725960fb54 | 58,611 | py | Python | python/paddle/fluid/tests/unittests/test_var_base.py | xiaoyangyang2/Paddle | b1a4668c5ff39e44efcfea46d567a5c398fdf3dc | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/test_var_base.py | xiaoyangyang2/Paddle | b1a4668c5ff39e44efcfea46d567a5c398fdf3dc | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/test_var_base.py | xiaoyangyang2/Paddle | b1a4668c5ff39e44efcfea46d567a5c398fdf3dc | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import six
import copy
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.framework import _test_eager_guard, _in_eager_mode
class TestVarBase(unittest.TestCase):
def setUp(self):
self.shape = [512, 1234]
self.dtype = np.float32
self.array = np.random.uniform(0.1, 1, self.shape).astype(self.dtype)
def test_to_tensor(self):
def _test_place(place):
with fluid.dygraph.guard():
paddle.set_default_dtype('float32')
# set_default_dtype should not take effect on int
x = paddle.to_tensor(1, place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1]))
self.assertNotEqual(x.dtype, core.VarDesc.VarType.FP32)
y = paddle.to_tensor(2, place=x.place)
self.assertEqual(str(x.place), str(y.place))
# set_default_dtype should not take effect on numpy
x = paddle.to_tensor(
np.array([1.2]).astype('float16'),
place=place,
stop_gradient=False)
self.assertTrue(
np.array_equal(x.numpy(), np.array([1.2], 'float16')))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP16)
# set_default_dtype take effect on int
x = paddle.to_tensor(1, place=place)
self.assertTrue(x.dtype, core.VarDesc.VarType.INT64)
# set_default_dtype take effect on float
x = paddle.to_tensor(1.2, place=place, stop_gradient=False)
self.assertTrue(
np.array_equal(x.numpy(), np.array([1.2]).astype(
'float32')))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP32)
clone_x = x.clone()
self.assertTrue(
np.array_equal(clone_x.numpy(),
np.array([1.2]).astype('float32')))
self.assertEqual(clone_x.dtype, core.VarDesc.VarType.FP32)
y = clone_x**2
y.backward()
self.assertTrue(
np.array_equal(x.grad.numpy(),
np.array([2.4]).astype('float32')))
y = x.cpu()
self.assertEqual(y.place.__repr__(), "Place(cpu)")
if core.is_compiled_with_cuda():
y = x.pin_memory()
self.assertEqual(y.place.__repr__(), "Place(gpu_pinned)")
y = x.cuda()
y = x.cuda(None)
self.assertEqual(y.place.__repr__(), "Place(gpu:0)")
y = x.cuda(device_id=0)
self.assertEqual(y.place.__repr__(), "Place(gpu:0)")
y = x.cuda(blocking=False)
self.assertEqual(y.place.__repr__(), "Place(gpu:0)")
y = x.cuda(blocking=True)
self.assertEqual(y.place.__repr__(), "Place(gpu:0)")
with self.assertRaises(ValueError):
y = x.cuda("test")
# support 'dtype' is core.VarType
x = paddle.rand((2, 2))
y = paddle.to_tensor([2, 2], dtype=x.dtype)
self.assertEqual(y.dtype, core.VarDesc.VarType.FP32)
# set_default_dtype take effect on complex
x = paddle.to_tensor(1 + 2j, place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1 + 2j]))
self.assertEqual(x.dtype, core.VarDesc.VarType.COMPLEX64)
paddle.set_default_dtype('float64')
x = paddle.to_tensor(1.2, place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1.2]))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP64)
x = paddle.to_tensor(1 + 2j, place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1 + 2j]))
self.assertEqual(x.dtype, core.VarDesc.VarType.COMPLEX128)
x = paddle.to_tensor(
1, dtype='float32', place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1.]))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(x.shape, [1])
self.assertEqual(x.stop_gradient, False)
self.assertEqual(x.type, core.VarDesc.VarType.LOD_TENSOR)
x = paddle.to_tensor(
(1, 2), dtype='float32', place=place, stop_gradient=False)
x = paddle.to_tensor(
[1, 2], dtype='float32', place=place, stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), [1., 2.]))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(x.grad, None)
self.assertEqual(x.shape, [2])
self.assertEqual(x.stop_gradient, False)
self.assertEqual(x.type, core.VarDesc.VarType.LOD_TENSOR)
x = paddle.to_tensor(
self.array,
dtype='float32',
place=place,
stop_gradient=False)
self.assertTrue(np.array_equal(x.numpy(), self.array))
self.assertEqual(x.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(x.shape, self.shape)
self.assertEqual(x.stop_gradient, False)
self.assertEqual(x.type, core.VarDesc.VarType.LOD_TENSOR)
y = paddle.to_tensor(x)
y = paddle.to_tensor(y, dtype='float64', place=place)
self.assertTrue(np.array_equal(y.numpy(), self.array))
self.assertEqual(y.dtype, core.VarDesc.VarType.FP64)
self.assertEqual(y.shape, self.shape)
self.assertEqual(y.stop_gradient, True)
self.assertEqual(y.type, core.VarDesc.VarType.LOD_TENSOR)
z = x + y
self.assertTrue(np.array_equal(z.numpy(), 2 * self.array))
x = paddle.to_tensor(
[1 + 2j, 1 - 2j], dtype='complex64', place=place)
y = paddle.to_tensor(x)
self.assertTrue(np.array_equal(x.numpy(), [1 + 2j, 1 - 2j]))
self.assertEqual(y.dtype, core.VarDesc.VarType.COMPLEX64)
self.assertEqual(y.shape, [2])
paddle.set_default_dtype('float32')
x = paddle.randn([3, 4])
x_array = np.array(x)
self.assertEqual(x_array.shape, x.numpy().shape)
self.assertEqual(x_array.dtype, x.numpy().dtype)
self.assertTrue(np.array_equal(x_array, x.numpy()))
x = paddle.to_tensor(1.0)
self.assertEqual(x.item(), 1.0)
self.assertTrue(isinstance(x.item(), float))
x = paddle.randn([3, 2, 2])
self.assertTrue(isinstance(x.item(5), float))
self.assertTrue(isinstance(x.item(1, 0, 1), float))
self.assertEqual(x.item(5), x.item(1, 0, 1))
self.assertTrue(
np.array_equal(x.item(1, 0, 1), x.numpy().item(1, 0, 1)))
x = paddle.to_tensor([[1.111111, 2.222222, 3.333333]])
self.assertEqual(x.item(0, 2), x.item(2))
self.assertAlmostEqual(x.item(2), 3.333333)
self.assertTrue(isinstance(x.item(0, 2), float))
x = paddle.to_tensor(1.0, dtype='float64')
self.assertEqual(x.item(), 1.0)
self.assertTrue(isinstance(x.item(), float))
x = paddle.to_tensor(1.0, dtype='float16')
self.assertEqual(x.item(), 1.0)
self.assertTrue(isinstance(x.item(), float))
x = paddle.to_tensor(1, dtype='uint8')
self.assertEqual(x.item(), 1)
self.assertTrue(isinstance(x.item(), int))
x = paddle.to_tensor(1, dtype='int8')
self.assertEqual(x.item(), 1)
self.assertTrue(isinstance(x.item(), int))
x = paddle.to_tensor(1, dtype='int16')
self.assertEqual(x.item(), 1)
self.assertTrue(isinstance(x.item(), int))
x = paddle.to_tensor(1, dtype='int32')
self.assertEqual(x.item(), 1)
self.assertTrue(isinstance(x.item(), int))
x = paddle.to_tensor(1, dtype='int64')
self.assertEqual(x.item(), 1)
self.assertTrue(isinstance(x.item(), int))
x = paddle.to_tensor(True)
self.assertEqual(x.item(), True)
self.assertTrue(isinstance(x.item(), bool))
x = paddle.to_tensor(1 + 1j)
self.assertEqual(x.item(), 1 + 1j)
self.assertTrue(isinstance(x.item(), complex))
numpy_array = np.random.randn(3, 4)
# covert core.LoDTensor to paddle.Tensor
lod_tensor = paddle.fluid.core.LoDTensor()
place = paddle.fluid.framework._current_expected_place()
lod_tensor.set(numpy_array, place)
x = paddle.to_tensor(lod_tensor)
self.assertTrue(np.array_equal(x.numpy(), numpy_array))
self.assertEqual(x.type, core.VarDesc.VarType.LOD_TENSOR)
self.assertEqual(str(x.place), str(place))
# covert core.Tensor to paddle.Tensor
x = paddle.to_tensor(numpy_array)
dlpack = x.value().get_tensor()._to_dlpack()
tensor_from_dlpack = paddle.fluid.core.from_dlpack(dlpack)
x = paddle.to_tensor(tensor_from_dlpack)
self.assertTrue(np.array_equal(x.numpy(), numpy_array))
self.assertEqual(x.type, core.VarDesc.VarType.LOD_TENSOR)
with self.assertRaises(ValueError):
paddle.randn([3, 2, 2]).item()
with self.assertRaises(ValueError):
paddle.randn([3, 2, 2]).item(18)
with self.assertRaises(ValueError):
paddle.randn([3, 2, 2]).item(1, 2)
with self.assertRaises(ValueError):
paddle.randn([3, 2, 2]).item(2, 1, 2)
with self.assertRaises(TypeError):
paddle.to_tensor('test')
with self.assertRaises(TypeError):
paddle.to_tensor(1, dtype='test')
with self.assertRaises(ValueError):
paddle.to_tensor([[1], [2, 3]])
with self.assertRaises(ValueError):
paddle.to_tensor([[1], [2, 3]], place='test')
with self.assertRaises(ValueError):
paddle.to_tensor([[1], [2, 3]], place=1)
_test_place(core.CPUPlace())
_test_place("cpu")
if core.is_compiled_with_cuda():
_test_place(core.CUDAPinnedPlace())
_test_place("gpu_pinned")
_test_place(core.CUDAPlace(0))
_test_place("gpu:0")
if core.is_compiled_with_npu():
_test_place(core.NPUPlace(0))
_test_place("npu:0")
def test_to_tensor_not_change_input_stop_gradient(self):
with paddle.fluid.dygraph.guard(core.CPUPlace()):
a = paddle.zeros([1024])
a.stop_gradient = False
b = paddle.to_tensor(a)
self.assertEqual(a.stop_gradient, False)
self.assertEqual(b.stop_gradient, True)
def test_to_tensor_change_place(self):
if core.is_compiled_with_cuda():
a_np = np.random.rand(1024, 1024)
with paddle.fluid.dygraph.guard(core.CPUPlace()):
a = paddle.to_tensor(a_np, place=paddle.CUDAPinnedPlace())
a = paddle.to_tensor(a)
self.assertEqual(a.place.__repr__(), "Place(cpu)")
with paddle.fluid.dygraph.guard(core.CUDAPlace(0)):
a = paddle.to_tensor(a_np, place=paddle.CUDAPinnedPlace())
a = paddle.to_tensor(a)
self.assertEqual(a.place.__repr__(), "Place(gpu:0)")
with paddle.fluid.dygraph.guard(core.CUDAPlace(0)):
a = paddle.to_tensor(a_np, place=paddle.CPUPlace())
a = paddle.to_tensor(a, place=paddle.CUDAPinnedPlace())
self.assertEqual(a.place.__repr__(), "Place(gpu_pinned)")
def test_to_tensor_with_lodtensor(self):
if core.is_compiled_with_cuda():
a_np = np.random.rand(1024, 1024)
with paddle.fluid.dygraph.guard(core.CPUPlace()):
lod_tensor = core.LoDTensor()
lod_tensor.set(a_np, core.CPUPlace())
a = paddle.to_tensor(lod_tensor)
self.assertTrue(np.array_equal(a_np, a.numpy()))
with paddle.fluid.dygraph.guard(core.CUDAPlace(0)):
lod_tensor = core.LoDTensor()
lod_tensor.set(a_np, core.CUDAPlace(0))
a = paddle.to_tensor(lod_tensor, place=core.CPUPlace())
self.assertTrue(np.array_equal(a_np, a.numpy()))
self.assertTrue(a.place.__repr__(), "Place(cpu)")
def test_to_variable(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array, name="abc")
self.assertTrue(np.array_equal(var.numpy(), self.array))
self.assertEqual(var.name, 'abc')
# default value
self.assertEqual(var.persistable, False)
self.assertEqual(var.stop_gradient, True)
self.assertEqual(var.shape, self.shape)
self.assertEqual(var.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(var.type, core.VarDesc.VarType.LOD_TENSOR)
# The type of input must be 'ndarray' or 'Variable', it will raise TypeError
with self.assertRaises(TypeError):
var = fluid.dygraph.to_variable("test", name="abc")
# test to_variable of LayerObjectHelper(LayerHelperBase)
with self.assertRaises(TypeError):
linear = fluid.dygraph.Linear(32, 64)
var = linear._helper.to_variable("test", name="abc")
def test_list_to_variable(self):
with fluid.dygraph.guard():
array = [[[1, 2], [1, 2], [1.0, 2]], [[1, 2], [1, 2], [1, 2]]]
var = fluid.dygraph.to_variable(array, dtype='int32')
self.assertTrue(np.array_equal(var.numpy(), array))
self.assertEqual(var.shape, [2, 3, 2])
self.assertEqual(var.dtype, core.VarDesc.VarType.INT32)
self.assertEqual(var.type, core.VarDesc.VarType.LOD_TENSOR)
def test_tuple_to_variable(self):
with fluid.dygraph.guard():
array = (((1, 2), (1, 2), (1, 2)), ((1, 2), (1, 2), (1, 2)))
var = fluid.dygraph.to_variable(array, dtype='float32')
self.assertTrue(np.array_equal(var.numpy(), array))
self.assertEqual(var.shape, [2, 3, 2])
self.assertEqual(var.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(var.type, core.VarDesc.VarType.LOD_TENSOR)
def test_tensor_to_variable(self):
with fluid.dygraph.guard():
t = fluid.Tensor()
t.set(np.random.random((1024, 1024)), fluid.CPUPlace())
var = fluid.dygraph.to_variable(t)
self.assertTrue(np.array_equal(t, var.numpy()))
def test_leaf_tensor(self):
with fluid.dygraph.guard():
x = paddle.to_tensor(np.random.uniform(-1, 1, size=[10, 10]))
self.assertTrue(x.is_leaf)
y = x + 1
self.assertTrue(y.is_leaf)
x = paddle.to_tensor(
np.random.uniform(
-1, 1, size=[10, 10]), stop_gradient=False)
self.assertTrue(x.is_leaf)
y = x + 1
self.assertFalse(y.is_leaf)
linear = paddle.nn.Linear(10, 10)
input = paddle.to_tensor(
np.random.uniform(
-1, 1, size=[10, 10]).astype('float32'),
stop_gradient=False)
self.assertTrue(input.is_leaf)
out = linear(input)
self.assertTrue(linear.weight.is_leaf)
self.assertTrue(linear.bias.is_leaf)
self.assertFalse(out.is_leaf)
def test_detach(self):
with fluid.dygraph.guard():
x = paddle.to_tensor(1.0, dtype="float64", stop_gradient=False)
detach_x = x.detach()
self.assertTrue(detach_x.stop_gradient, True)
cmp_float = np.allclose if core.is_compiled_with_rocm(
) else np.array_equal
detach_x[:] = 10.0
self.assertTrue(cmp_float(x.numpy(), [10.0]))
y = x**2
y.backward()
self.assertTrue(cmp_float(x.grad.numpy(), [20.0]))
self.assertEqual(detach_x.grad, None)
detach_x.stop_gradient = False # Set stop_gradient to be False, supported auto-grad
z = 3 * detach_x**2
z.backward()
self.assertTrue(cmp_float(x.grad.numpy(), [20.0]))
self.assertTrue(cmp_float(detach_x.grad.numpy(), [60.0]))
with self.assertRaises(ValueError):
detach_x[:] = 5.0
detach_x.stop_gradient = True
# Due to sharing of data with origin Tensor, There are some unsafe operations:
with self.assertRaises(RuntimeError):
y = 2**x
detach_x[:] = 5.0
y.backward()
def test_write_property(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
self.assertEqual(var.name, 'generated_tensor_0')
var.name = 'test'
self.assertEqual(var.name, 'test')
self.assertEqual(var.persistable, False)
var.persistable = True
self.assertEqual(var.persistable, True)
self.assertEqual(var.stop_gradient, True)
var.stop_gradient = False
self.assertEqual(var.stop_gradient, False)
    def test_deep_copy(self):
        """copy.deepcopy support for VarBase: empty vars, memo sharing,
        non-leaf rejection, and selected-rows variables."""
        with fluid.dygraph.guard():
            # An empty VarBase deep-copies with all metadata preserved.
            empty_var = core.VarBase()
            empty_var_copy = copy.deepcopy(empty_var)
            self.assertEqual(empty_var.stop_gradient,
                             empty_var_copy.stop_gradient)
            self.assertEqual(empty_var.persistable, empty_var_copy.persistable)
            self.assertEqual(empty_var.type, empty_var_copy.type)
            self.assertEqual(empty_var.dtype, empty_var_copy.dtype)
            x = paddle.to_tensor([2.], stop_gradient=False)
            y = paddle.to_tensor([3.], stop_gradient=False)
            z = x * y
            # Copies made through a shared memo dict must be reused below.
            memo = {}
            x_copy = copy.deepcopy(x, memo)
            y_copy = copy.deepcopy(y, memo)
            self.assertEqual(x_copy.stop_gradient, y_copy.stop_gradient)
            self.assertEqual(x_copy.persistable, y_copy.persistable)
            self.assertEqual(x_copy.type, y_copy.type)
            self.assertEqual(x_copy.dtype, y_copy.dtype)
            self.assertTrue(np.array_equal(x.numpy(), x_copy.numpy()))
            self.assertTrue(np.array_equal(y.numpy(), y_copy.numpy()))
            # The copy is a distinct object and the source is unchanged.
            self.assertNotEqual(id(x), id(x_copy))
            self.assertTrue(np.array_equal(x.numpy(), [2.]))
            # In-place writes on a grad-enabled copy are rejected.
            with self.assertRaises(ValueError):
                x_copy[:] = 5.
            # z is a non-leaf (has a grad graph); deepcopy must refuse it.
            with self.assertRaises(RuntimeError):
                copy.deepcopy(z)
            # Re-copying through the same memo returns the cached copies.
            x_copy2 = copy.deepcopy(x, memo)
            y_copy2 = copy.deepcopy(y, memo)
            self.assertEqual(id(x_copy), id(x_copy2))
            self.assertEqual(id(y_copy), id(y_copy2))
            # test copy selected rows
            x = core.VarBase(core.VarDesc.VarType.FP32, [3, 100],
                             "selected_rows",
                             core.VarDesc.VarType.SELECTED_ROWS, True)
            selected_rows = x.value().get_selected_rows()
            selected_rows.get_tensor().set(
                np.random.rand(3, 100), core.CPUPlace())
            selected_rows.set_height(10)
            selected_rows.set_rows([3, 5, 7])
            x_copy = copy.deepcopy(x)
            self.assertEqual(x_copy.stop_gradient, x.stop_gradient)
            self.assertEqual(x_copy.persistable, x.persistable)
            self.assertEqual(x_copy.type, x.type)
            self.assertEqual(x_copy.dtype, x.dtype)
            # Height, row indices, and the backing tensor must all match.
            copy_selected_rows = x_copy.value().get_selected_rows()
            self.assertEqual(copy_selected_rows.height(),
                             selected_rows.height())
            self.assertEqual(copy_selected_rows.rows(), selected_rows.rows())
            self.assertTrue(
                np.array_equal(
                    np.array(copy_selected_rows.get_tensor()),
                    np.array(selected_rows.get_tensor())))
# test some patched methods
def test_set_value(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
tmp1 = np.random.uniform(0.1, 1, [2, 2, 3]).astype(self.dtype)
self.assertRaises(AssertionError, var.set_value, tmp1)
tmp2 = np.random.uniform(0.1, 1, self.shape).astype(self.dtype)
var.set_value(tmp2)
self.assertTrue(np.array_equal(var.numpy(), tmp2))
def test_to_string(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
self.assertTrue(isinstance(str(var), str))
def test_element_size(self):
with fluid.dygraph.guard():
x = paddle.to_tensor(1, dtype='bool')
self.assertEqual(x.element_size(), 1)
x = paddle.to_tensor(1, dtype='float16')
self.assertEqual(x.element_size(), 2)
x = paddle.to_tensor(1, dtype='float32')
self.assertEqual(x.element_size(), 4)
x = paddle.to_tensor(1, dtype='float64')
self.assertEqual(x.element_size(), 8)
x = paddle.to_tensor(1, dtype='int8')
self.assertEqual(x.element_size(), 1)
x = paddle.to_tensor(1, dtype='int16')
self.assertEqual(x.element_size(), 2)
x = paddle.to_tensor(1, dtype='int32')
self.assertEqual(x.element_size(), 4)
x = paddle.to_tensor(1, dtype='int64')
self.assertEqual(x.element_size(), 8)
x = paddle.to_tensor(1, dtype='uint8')
self.assertEqual(x.element_size(), 1)
x = paddle.to_tensor(1, dtype='complex64')
self.assertEqual(x.element_size(), 8)
x = paddle.to_tensor(1, dtype='complex128')
self.assertEqual(x.element_size(), 16)
def test_backward(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
var.stop_gradient = False
loss = fluid.layers.relu(var)
loss.backward()
grad_var = var._grad_ivar()
self.assertEqual(grad_var.shape, self.shape)
def test_gradient(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
var.stop_gradient = False
loss = fluid.layers.relu(var)
loss.backward()
grad_var = var.gradient()
self.assertEqual(grad_var.shape, self.array.shape)
def test_block(self):
with fluid.dygraph.guard():
var = fluid.dygraph.to_variable(self.array)
self.assertEqual(var.block,
fluid.default_main_program().global_block())
    def _test_slice(self):
        """Exercise __getitem__ slicing (basic, negative, strided, empty)
        and compare each result against numpy's slicing semantics.

        NOTE: `local_out[i]` must stay positionally aligned with `vars[i]`;
        the assertions below rely on that ordering.
        """
        # Shape checks on a large random tensor.
        w = fluid.dygraph.to_variable(
            np.random.random((784, 100, 100)).astype('float64'))
        for i in range(3):
            nw = w[i]
            self.assertEqual((100, 100), tuple(nw.shape))
        nw = w[:]
        self.assertEqual((784, 100, 100), tuple(nw.shape))
        nw = w[:, :]
        self.assertEqual((784, 100, 100), tuple(nw.shape))
        nw = w[:, :, -1]
        self.assertEqual((784, 100), tuple(nw.shape))
        # Full scalar indexing still yields a 1-element tensor, not a 0-d one.
        nw = w[1, 1, 1]
        self.assertEqual(len(nw.shape), 1)
        self.assertEqual(nw.shape[0], 1)
        nw = w[:, :, :-1]
        self.assertEqual((784, 100, 99), tuple(nw.shape))
        # Value checks on a small 3x3x3 tensor with known contents.
        tensor_array = np.array(
            [[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
             [[10, 11, 12], [13, 14, 15], [16, 17, 18]],
             [[19, 20, 21], [22, 23, 24], [25, 26, 27]]]).astype('float32')
        var = fluid.dygraph.to_variable(tensor_array)
        var1 = var[0, 1, 1]
        var2 = var[1:]
        var3 = var[0:1]
        var4 = var[::-1]
        var5 = var[1, 1:, 1:]
        var_reshape = fluid.layers.reshape(var, [3, -1, 3])
        var6 = var_reshape[:, :, -1]
        var7 = var[:, :, :-1]
        var8 = var[:1, :1, :1]
        var9 = var[:-1, :-1, :-1]
        var10 = var[::-1, :1, :-1]
        var11 = var[:-1, ::-1, -1:]
        var12 = var[1:2, 2:, ::-1]
        # Out-of-range stop (10 > 3) is clipped, matching numpy.
        var13 = var[2:10, 2:, -2:-1]
        var14 = var[1:-1, 0:2, ::-1]
        var15 = var[::-1, ::-1, ::-1]
        var16 = var[-4:4]
        # Empty slices (0:0 and 1:1) produce zero-length axes.
        var17 = var[:, 0, 0:0]
        var18 = var[:, 1:1:2]
        vars = [
            var, var1, var2, var3, var4, var5, var6, var7, var8, var9, var10,
            var11, var12, var13, var14, var15, var16, var17, var18
        ]
        local_out = [var.numpy() for var in vars]
        self.assertTrue(np.array_equal(local_out[1], tensor_array[0, 1, 1:2]))
        self.assertTrue(np.array_equal(local_out[2], tensor_array[1:]))
        self.assertTrue(np.array_equal(local_out[3], tensor_array[0:1]))
        self.assertTrue(np.array_equal(local_out[4], tensor_array[::-1]))
        self.assertTrue(np.array_equal(local_out[5], tensor_array[1, 1:, 1:]))
        self.assertTrue(
            np.array_equal(local_out[6],
                           tensor_array.reshape((3, -1, 3))[:, :, -1]))
        self.assertTrue(np.array_equal(local_out[7], tensor_array[:, :, :-1]))
        self.assertTrue(np.array_equal(local_out[8], tensor_array[:1, :1, :1]))
        self.assertTrue(
            np.array_equal(local_out[9], tensor_array[:-1, :-1, :-1]))
        self.assertTrue(
            np.array_equal(local_out[10], tensor_array[::-1, :1, :-1]))
        self.assertTrue(
            np.array_equal(local_out[11], tensor_array[:-1, ::-1, -1:]))
        self.assertTrue(
            np.array_equal(local_out[12], tensor_array[1:2, 2:, ::-1]))
        self.assertTrue(
            np.array_equal(local_out[13], tensor_array[2:10, 2:, -2:-1]))
        self.assertTrue(
            np.array_equal(local_out[14], tensor_array[1:-1, 0:2, ::-1]))
        self.assertTrue(
            np.array_equal(local_out[15], tensor_array[::-1, ::-1, ::-1]))
        self.assertTrue(np.array_equal(local_out[16], tensor_array[-4:4]))
        self.assertTrue(np.array_equal(local_out[17], tensor_array[:, 0, 0:0]))
        self.assertTrue(np.array_equal(local_out[18], tensor_array[:, 1:1:2]))
    def _test_slice_for_tensor_attr(self):
        """Like _test_slice, but slice bounds/steps are 1-element int32
        tensors instead of Python ints; results must match numpy.

        NOTE: `local_out[i]` must stay positionally aligned with `vars[i]`.
        """
        tensor_array = np.array(
            [[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
             [[10, 11, 12], [13, 14, 15], [16, 17, 18]],
             [[19, 20, 21], [22, 23, 24], [25, 26, 27]]]).astype('float32')
        var = paddle.to_tensor(tensor_array)
        # 1-element tensors used as slice components.
        one = paddle.ones(shape=[1], dtype="int32")
        two = paddle.full(shape=[1], fill_value=2, dtype="int32")
        negative_one = paddle.full(shape=[1], fill_value=-1, dtype="int32")
        # NOTE(review): `four` is defined but never used below, and `var`
        # is immediately rebound, shadowing the paddle.to_tensor result.
        four = paddle.full(shape=[1], fill_value=4, dtype="int32")
        var = fluid.dygraph.to_variable(tensor_array)
        var1 = var[0, one, one]
        var2 = var[one:]
        var3 = var[0:one]
        var4 = var[::negative_one]
        var5 = var[one, one:, one:]
        var_reshape = fluid.layers.reshape(var, [3, negative_one, 3])
        var6 = var_reshape[:, :, negative_one]
        var7 = var[:, :, :negative_one]
        var8 = var[:one, :one, :1]
        var9 = var[:-1, :negative_one, :negative_one]
        var10 = var[::negative_one, :one, :negative_one]
        var11 = var[:negative_one, ::-1, negative_one:]
        var12 = var[one:2, 2:, ::negative_one]
        var13 = var[two:10, 2:, -2:negative_one]
        var14 = var[1:negative_one, 0:2, ::negative_one]
        var15 = var[::negative_one, ::-1, ::negative_one]
        var16 = var[-4:4]
        vars = [
            var, var1, var2, var3, var4, var5, var6, var7, var8, var9, var10,
            var11, var12, var13, var14, var15, var16
        ]
        local_out = [var.numpy() for var in vars]
        self.assertTrue(np.array_equal(local_out[1], tensor_array[0, 1, 1:2]))
        self.assertTrue(np.array_equal(local_out[2], tensor_array[1:]))
        self.assertTrue(np.array_equal(local_out[3], tensor_array[0:1]))
        self.assertTrue(np.array_equal(local_out[4], tensor_array[::-1]))
        self.assertTrue(np.array_equal(local_out[5], tensor_array[1, 1:, 1:]))
        self.assertTrue(
            np.array_equal(local_out[6],
                           tensor_array.reshape((3, -1, 3))[:, :, -1]))
        self.assertTrue(np.array_equal(local_out[7], tensor_array[:, :, :-1]))
        self.assertTrue(np.array_equal(local_out[8], tensor_array[:1, :1, :1]))
        self.assertTrue(
            np.array_equal(local_out[9], tensor_array[:-1, :-1, :-1]))
        self.assertTrue(
            np.array_equal(local_out[10], tensor_array[::-1, :1, :-1]))
        self.assertTrue(
            np.array_equal(local_out[11], tensor_array[:-1, ::-1, -1:]))
        self.assertTrue(
            np.array_equal(local_out[12], tensor_array[1:2, 2:, ::-1]))
        self.assertTrue(
            np.array_equal(local_out[13], tensor_array[2:10, 2:, -2:-1]))
        self.assertTrue(
            np.array_equal(local_out[14], tensor_array[1:-1, 0:2, ::-1]))
        self.assertTrue(
            np.array_equal(local_out[15], tensor_array[::-1, ::-1, ::-1]))
        self.assertTrue(np.array_equal(local_out[16], tensor_array[-4:4]))
    def _test_for_getitem_ellipsis_index(self):
        """Ellipsis (...) indexing must match numpy for float and int
        tensors, in every position (leading, trailing, interior)."""
        shape = (64, 3, 5, 256)
        np_fp32_value = np.random.random(shape).astype('float32')
        np_int_value = np.random.randint(1, 100, shape)
        # NOTE(review): these two tensors are recreated identically just
        # before the assert_getitem_ellipsis_index calls below, making this
        # first pair redundant.
        var_fp32 = paddle.to_tensor(np_fp32_value)
        var_int = paddle.to_tensor(np_int_value)
        def assert_getitem_ellipsis_index(var_tensor, var_np):
            # Each entry pairs positionally with the numpy expression in
            # the assertions that follow.
            var = [
                var_tensor[..., 0].numpy(),
                var_tensor[..., 1, 0].numpy(),
                var_tensor[0, ..., 1, 0].numpy(),
                var_tensor[1, ..., 1].numpy(),
                var_tensor[2, ...].numpy(),
                var_tensor[2, 0, ...].numpy(),
                var_tensor[2, 0, 1, ...].numpy(),
                var_tensor[...].numpy(),
                var_tensor[:, ..., 100].numpy(),
            ]
            self.assertTrue(np.array_equal(var[0], var_np[..., 0]))
            self.assertTrue(np.array_equal(var[1], var_np[..., 1, 0]))
            self.assertTrue(np.array_equal(var[2], var_np[0, ..., 1, 0]))
            self.assertTrue(np.array_equal(var[3], var_np[1, ..., 1]))
            self.assertTrue(np.array_equal(var[4], var_np[2, ...]))
            self.assertTrue(np.array_equal(var[5], var_np[2, 0, ...]))
            self.assertTrue(np.array_equal(var[6], var_np[2, 0, 1, ...]))
            self.assertTrue(np.array_equal(var[7], var_np[...]))
            self.assertTrue(np.array_equal(var[8], var_np[:, ..., 100]))
        var_fp32 = paddle.to_tensor(np_fp32_value)
        var_int = paddle.to_tensor(np_int_value)
        assert_getitem_ellipsis_index(var_fp32, np_fp32_value)
        assert_getitem_ellipsis_index(var_int, np_int_value)
        # test 1 dim tensor
        var_one_dim = paddle.to_tensor([1, 2, 3, 4])
        self.assertTrue(
            np.array_equal(var_one_dim[..., 0].numpy(), np.array([1])))
def _test_none_index(self):
    """Indexing with None must insert size-1 axes exactly like numpy."""
    shape = (8, 64, 5, 256)
    np_value = np.random.random(shape).astype('float32')
    var_tensor = paddle.to_tensor(np_value)
    # Evaluate every None-indexing pattern once, then compare each result
    # against the identical pattern applied to the numpy source array.
    var = [
        var_tensor[1, 0, None].numpy(),
        var_tensor[None, ..., 1, 0].numpy(),
        var_tensor[:, :, :, None].numpy(),
        var_tensor[1, ..., 1, None].numpy(),
        var_tensor[2, ..., None, None].numpy(),
        var_tensor[None, 2, 0, ...].numpy(),
        var_tensor[None, 2, None, 1].numpy(),
        var_tensor[None].numpy(),
        var_tensor[0, 0, None, 0, 0, None].numpy(),
        var_tensor[None, None, 0, ..., None].numpy(),
        var_tensor[..., None, :, None].numpy(),
        var_tensor[0, 1:10:2, None, None, ...].numpy(),
    ]
    self.assertTrue(np.array_equal(var[0], np_value[1, 0, None]))
    self.assertTrue(np.array_equal(var[1], np_value[None, ..., 1, 0]))
    self.assertTrue(np.array_equal(var[2], np_value[:, :, :, None]))
    self.assertTrue(np.array_equal(var[3], np_value[1, ..., 1, None]))
    self.assertTrue(np.array_equal(var[4], np_value[2, ..., None, None]))
    self.assertTrue(np.array_equal(var[5], np_value[None, 2, 0, ...]))
    self.assertTrue(np.array_equal(var[6], np_value[None, 2, None, 1]))
    self.assertTrue(np.array_equal(var[7], np_value[None]))
    self.assertTrue(
        np.array_equal(var[8], np_value[0, 0, None, 0, 0, None]))
    self.assertTrue(
        np.array_equal(var[9], np_value[None, None, 0, ..., None]))
    self.assertTrue(np.array_equal(var[10], np_value[..., None, :, None]))

    # var[11] is computed above but deliberately not compared here:
    # TODO(zyfncg) there is a bug of dimensions when slice step > 1 and
    # indexs has int type
    # self.assertTrue(
    #     np.array_equal(var[11], np_value[0, 1:10:2, None, None, ...]))
def _test_bool_index(self):
    """Boolean-mask indexing must match numpy, and invalid masks must raise."""
    shape = (4, 2, 5, 64)
    np_value = np.random.random(shape).astype('float32')
    var_tensor = paddle.to_tensor(np_value)
    # index[3] mixes bools with ints; presumably both libraries then treat
    # the list as an integer index array — the assert below verifies they
    # at least agree with each other.
    index = [[True, True, True, True], [True, False, True, True],
             [True, False, False, True], [False, 0, 1, True, True]]
    index2d = np.array([[True, True], [False, False], [True, False],
                        [True, True]])
    tensor_index = paddle.to_tensor(index2d)
    var = [
        var_tensor[index[0]].numpy(),
        var_tensor[index[1]].numpy(),
        var_tensor[index[2]].numpy(),
        var_tensor[index[3]].numpy(),
        var_tensor[paddle.to_tensor(index[0])].numpy(),
        var_tensor[tensor_index].numpy(),
    ]
    self.assertTrue(np.array_equal(var[0], np_value[index[0]]))
    self.assertTrue(np.array_equal(var[1], np_value[index[1]]))
    self.assertTrue(np.array_equal(var[2], np_value[index[2]]))
    self.assertTrue(np.array_equal(var[3], np_value[index[3]]))
    self.assertTrue(np.array_equal(var[4], np_value[index[0]]))
    self.assertTrue(np.array_equal(var[5], np_value[index2d]))
    # Comparison operators yield boolean tensors usable directly as masks.
    self.assertTrue(
        np.array_equal(var_tensor[var_tensor > 0.67], np_value[np_value >
                                                               0.67]))
    self.assertTrue(
        np.array_equal(var_tensor[var_tensor < 0.55], np_value[np_value <
                                                               0.55]))
    # All-False masks and masks whose length mismatches the first dimension
    # (or whose rank mismatches, for the tensor case) are rejected.
    with self.assertRaises(ValueError):
        var_tensor[[False, False, False, False]]
    with self.assertRaises(ValueError):
        var_tensor[[True, False]]
    with self.assertRaises(ValueError):
        var_tensor[[True, False, False, False, False]]
    with self.assertRaises(IndexError):
        var_tensor[paddle.to_tensor([[True, False, False, False]])]
def _test_for_var(self):
    """Iterating a dygraph Variable yields its slices along axis 0."""
    source = np.random.random((30, 100, 100)).astype('float32')
    variable = fluid.dygraph.to_variable(source)
    for row_id, row in enumerate(variable):
        self.assertTrue(np.array_equal(row.numpy(), source[row_id]))
def _test_numpy_index(self):
    """numpy integer scalars (longlong/int64/int32/int16) work as indices
    and slice bounds, matching plain-int indexing on the source array."""
    array = np.arange(120).reshape([4, 5, 6])
    t = paddle.to_tensor(array)
    self.assertTrue(np.array_equal(t[np.longlong(0)].numpy(), array[0]))
    self.assertTrue(
        np.array_equal(t[np.longlong(0):np.longlong(4):np.longlong(2)]
                       .numpy(), array[0:4:2]))
    self.assertTrue(np.array_equal(t[np.int64(0)].numpy(), array[0]))
    self.assertTrue(
        np.array_equal(t[np.int32(1):np.int32(4):np.int32(2)].numpy(),
                       array[1:4:2]))
    self.assertTrue(
        np.array_equal(t[np.int16(0):np.int16(4):np.int16(2)].numpy(),
                       array[0:4:2]))
def _test_list_index(self):
    """Fancy (list/array) indexing for both __getitem__ and __setitem__."""
    # case1: a pair of index lists selects element-wise coordinates,
    # whether passed as paddle tensors or as plain Python lists.
    array = np.arange(120).reshape([6, 5, 4])
    x = paddle.to_tensor(array)
    py_idx = [[0, 2, 0, 1, 3], [0, 0, 1, 2, 0]]
    idx = [paddle.to_tensor(py_idx[0]), paddle.to_tensor(py_idx[1])]
    self.assertTrue(np.array_equal(x[idx].numpy(), array[py_idx]))
    self.assertTrue(np.array_equal(x[py_idx].numpy(), array[py_idx]))
    # case2: tensor-valued slice bounds in __setitem__ broadcast a scalar
    # into columns [2, 5) of every row.
    tensor_x = paddle.to_tensor(
        np.zeros(12).reshape(2, 6).astype(np.float32))
    tensor_y1 = paddle.zeros([1], dtype='int32') + 2
    tensor_y2 = paddle.zeros([1], dtype='int32') + 5
    tensor_x[:, tensor_y1:tensor_y2] = 42
    res = tensor_x.numpy()
    exp = np.array([[0., 0., 42., 42., 42., 0.],
                    [0., 0., 42., 42., 42., 0.]])
    self.assertTrue(np.array_equal(res, exp))
    # case3: separate row/column coordinate arrays.
    row = np.array([0, 1, 2])
    col = np.array([2, 1, 3])
    self.assertTrue(np.array_equal(array[row, col], x[row, col].numpy()))
def func_test_slice(self):
    """Drive every slicing sub-test under a dygraph guard, then verify
    out-of-range and unsupported mixed indexing raise IndexError."""
    with fluid.dygraph.guard():
        self._test_slice()
        self._test_slice_for_tensor_attr()
        self._test_for_var()
        self._test_for_getitem_ellipsis_index()
        self._test_none_index()
        self._test_bool_index()
        self._test_numpy_index()
        self._test_list_index()

        var = fluid.dygraph.to_variable(self.array)
        self.assertTrue(np.array_equal(var[1, :].numpy(), self.array[1, :]))
        self.assertTrue(np.array_equal(var[::-1].numpy(), self.array[::-1]))

        # Indexing one past either end of axis 0 must raise.
        with self.assertRaises(IndexError):
            y = var[self.shape[0]]

        with self.assertRaises(IndexError):
            y = var[0 - self.shape[0] - 1]

        # Mixing a tensor index with a numpy bool mask is unsupported.
        with self.assertRaises(IndexError):
            mask = np.array([1, 0, 1, 0], dtype=bool)
            var[paddle.to_tensor([0, 1]), mask]
def test_slice(self):
    """Run the slicing suite twice: once in eager mode, once in legacy mode."""
    with _test_eager_guard():
        self.func_test_slice()
    self.func_test_slice()
def test_var_base_to_np(self):
    """fluid.framework._var_base_to_np must agree with Tensor.numpy()."""
    with fluid.dygraph.guard():
        var = fluid.dygraph.to_variable(self.array)
        self.assertTrue(
            np.array_equal(var.numpy(),
                           fluid.framework._var_base_to_np(var)))
def test_var_base_as_np(self):
    """np.array(var) converts a Tensor, with and without an explicit dtype."""
    with fluid.dygraph.guard():
        var = fluid.dygraph.to_variable(self.array)
        self.assertTrue(np.array_equal(var.numpy(), np.array(var)))
        self.assertTrue(
            np.array_equal(
                var.numpy(), np.array(
                    var, dtype=np.float32)))
def test_if(self):
    """A Tensor used in a boolean context truthy-tests on its value."""
    with fluid.dygraph.guard():
        var1 = fluid.dygraph.to_variable(np.array([[[0]]]))
        var2 = fluid.dygraph.to_variable(np.array([[[1]]]))

        # Exercise `if <tensor>:` — a zero tensor is falsy, nonzero truthy.
        var1_bool = True if var1 else False
        var2_bool = True if var2 else False

        assert var1_bool == False, "if var1 should be false"
        assert var2_bool == True, "if var2 should be true"
        # bool() conversion must agree with the branch results above.
        assert bool(var1) == False, "bool(var1) is False"
        assert bool(var2) == True, "bool(var2) is True"
def test_to_static_var(self):
    """_to_static_var converts VarBase -> Variable/Parameter and
    ParamBase -> Parameter; attribute carry-over is checked by the helper."""
    with fluid.dygraph.guard():
        # Convert VarBase into Variable or Parameter
        var_base = fluid.dygraph.to_variable(self.array, name="var_base_1")
        static_var = var_base._to_static_var()
        self._assert_to_static(var_base, static_var)

        var_base = fluid.dygraph.to_variable(self.array, name="var_base_2")
        static_param = var_base._to_static_var(to_parameter=True)
        self._assert_to_static(var_base, static_param, True)

        # Convert ParamBase into Parameter
        fc = fluid.dygraph.Linear(
            10,
            20,
            param_attr=fluid.ParamAttr(
                learning_rate=0.001,
                do_model_average=True,
                regularizer=fluid.regularizer.L1Decay()))
        weight = fc.parameters()[0]
        static_param = weight._to_static_var()
        self._assert_to_static(weight, static_param, True)
def _assert_to_static(self, var_base, static_var, is_param=False):
    """Shared checks for test_to_static_var.

    Verifies the static counterpart's type, and that identifying attributes
    (block/dtype/type/name/shape) — plus, for parameters, trainability and
    regularization settings — carried over from the dygraph object.
    """
    if is_param:
        self.assertTrue(isinstance(static_var, fluid.framework.Parameter))
        # Fix: the original called assertTrue(x, True), where True was
        # silently used as the failure *message*; assertEqual states the
        # intended comparison.
        self.assertEqual(static_var.persistable, True)
        if isinstance(var_base, fluid.framework.ParamBase):
            for attr in ['trainable', 'is_distributed', 'do_model_average']:
                self.assertEqual(
                    getattr(var_base, attr), getattr(static_var, attr))

            self.assertEqual(static_var.optimize_attr['learning_rate'],
                             0.001)
            self.assertTrue(
                isinstance(static_var.regularizer,
                           fluid.regularizer.L1Decay))
    else:
        self.assertTrue(isinstance(static_var, fluid.framework.Variable))

    # Attributes that must be identical regardless of parameter-ness.
    attr_keys = ['block', 'dtype', 'type', 'name']
    for attr in attr_keys:
        self.assertEqual(getattr(var_base, attr), getattr(static_var, attr))

    self.assertListEqual(list(var_base.shape), list(static_var.shape))
def test_tensor_str(self):
    """str() of a seeded random Tensor abbreviates rows/cols with '...'
    under set_printoptions(precision=4, threshold=100, edgeitems=3)."""
    paddle.enable_static()
    paddle.disable_static(paddle.CPUPlace())
    paddle.seed(10)
    a = paddle.rand([10, 20])
    paddle.set_printoptions(4, 100, 3)
    a_str = str(a)

    expected = '''Tensor(shape=[10, 20], dtype=float32, place=Place(cpu), stop_gradient=True,
[[0.2727, 0.5489, 0.8655, ..., 0.2916, 0.8525, 0.9000],
[0.3806, 0.8996, 0.0928, ..., 0.9535, 0.8378, 0.6409],
[0.1484, 0.4038, 0.8294, ..., 0.0148, 0.6520, 0.4250],
...,
[0.3426, 0.1909, 0.7240, ..., 0.4218, 0.2676, 0.5679],
[0.5561, 0.2081, 0.0676, ..., 0.9778, 0.3302, 0.9559],
[0.2665, 0.8483, 0.5389, ..., 0.4956, 0.6862, 0.9178]])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str2(self):
    """Columns are padded so decimal points align across rows."""
    paddle.disable_static(paddle.CPUPlace())
    a = paddle.to_tensor([[1.5111111, 1.0], [0, 0]])
    a_str = str(a)

    expected = '''Tensor(shape=[2, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
[[1.5111, 1. ],
[0. , 0. ]])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str3(self):
    """Negative values get a sign column; positives are space-padded."""
    paddle.disable_static(paddle.CPUPlace())
    a = paddle.to_tensor([[-1.5111111, 1.0], [0, -0.5]])
    a_str = str(a)

    expected = '''Tensor(shape=[2, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
[[-1.5111, 1. ],
[ 0. , -0.5000]])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str_scaler(self):
    """A 0-D (scalar) bool Tensor prints its bare value, no brackets."""
    paddle.disable_static(paddle.CPUPlace())
    a = paddle.to_tensor(np.array(False))
    a_str = str(a)

    expected = '''Tensor(shape=[], dtype=bool, place=Place(cpu), stop_gradient=True,
False)'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str_shape_with_zero(self):
    """A Tensor with a zero-length dimension prints an empty data list."""
    paddle.disable_static(paddle.CPUPlace())
    x = paddle.ones((10, 10))
    # where() on an all-ones tensor matches nothing, giving shape [0, 2].
    y = paddle.fluid.layers.where(x == 0)
    a_str = str(y)

    expected = '''Tensor(shape=[0, 2], dtype=int64, place=Place(cpu), stop_gradient=True,
[])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str_linewidth(self):
    """linewidth=80 wraps the 128-element vector at fixed column widths."""
    paddle.disable_static(paddle.CPUPlace())
    paddle.seed(2021)
    x = paddle.rand([128])
    paddle.set_printoptions(
        precision=4, threshold=1000, edgeitems=3, linewidth=80)
    a_str = str(x)

    expected = '''Tensor(shape=[128], dtype=float32, place=Place(cpu), stop_gradient=True,
[0.3759, 0.0278, 0.2489, 0.3110, 0.9105, 0.7381, 0.1905, 0.4726, 0.2435,
0.9142, 0.3367, 0.7243, 0.7664, 0.9915, 0.2921, 0.1363, 0.8096, 0.2915,
0.9564, 0.9972, 0.2573, 0.2597, 0.3429, 0.2484, 0.9579, 0.7003, 0.4126,
0.4274, 0.0074, 0.9686, 0.9910, 0.0144, 0.6564, 0.2932, 0.7114, 0.9301,
0.6421, 0.0538, 0.1273, 0.5771, 0.9336, 0.6416, 0.1832, 0.9311, 0.7702,
0.7474, 0.4479, 0.3382, 0.5579, 0.0444, 0.9802, 0.9874, 0.3038, 0.5640,
0.2408, 0.5489, 0.8866, 0.1006, 0.5881, 0.7560, 0.7928, 0.8604, 0.4670,
0.9285, 0.1482, 0.4541, 0.1307, 0.6221, 0.4902, 0.1147, 0.4415, 0.2987,
0.7276, 0.2077, 0.7551, 0.9652, 0.4369, 0.2282, 0.0047, 0.2934, 0.4308,
0.4190, 0.1442, 0.3650, 0.3056, 0.6535, 0.1211, 0.8721, 0.7408, 0.4220,
0.5937, 0.3123, 0.9198, 0.0275, 0.5338, 0.4622, 0.7521, 0.3609, 0.4703,
0.1736, 0.8976, 0.7616, 0.3756, 0.2416, 0.2907, 0.3246, 0.4305, 0.5717,
0.0735, 0.0361, 0.5534, 0.4399, 0.9260, 0.6525, 0.3064, 0.4573, 0.9210,
0.8269, 0.2424, 0.7494, 0.8945, 0.7098, 0.8078, 0.4707, 0.5715, 0.7232,
0.4678, 0.5047])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str_linewidth2(self):
    """sci_mode=True prints every element in scientific notation; wider
    linewidth=160 packs more columns per line."""
    paddle.disable_static(paddle.CPUPlace())
    paddle.seed(2021)
    x = paddle.rand([128])
    paddle.set_printoptions(precision=4, linewidth=160, sci_mode=True)
    a_str = str(x)

    expected = '''Tensor(shape=[128], dtype=float32, place=Place(cpu), stop_gradient=True,
[3.7587e-01, 2.7798e-02, 2.4891e-01, 3.1097e-01, 9.1053e-01, 7.3811e-01, 1.9045e-01, 4.7258e-01, 2.4354e-01, 9.1415e-01, 3.3666e-01, 7.2428e-01,
7.6640e-01, 9.9146e-01, 2.9215e-01, 1.3625e-01, 8.0957e-01, 2.9153e-01, 9.5642e-01, 9.9718e-01, 2.5732e-01, 2.5973e-01, 3.4292e-01, 2.4841e-01,
9.5794e-01, 7.0029e-01, 4.1260e-01, 4.2737e-01, 7.3788e-03, 9.6863e-01, 9.9102e-01, 1.4416e-02, 6.5640e-01, 2.9318e-01, 7.1136e-01, 9.3008e-01,
6.4209e-01, 5.3849e-02, 1.2730e-01, 5.7712e-01, 9.3359e-01, 6.4155e-01, 1.8320e-01, 9.3110e-01, 7.7021e-01, 7.4736e-01, 4.4793e-01, 3.3817e-01,
5.5794e-01, 4.4412e-02, 9.8023e-01, 9.8735e-01, 3.0376e-01, 5.6397e-01, 2.4082e-01, 5.4893e-01, 8.8659e-01, 1.0065e-01, 5.8812e-01, 7.5600e-01,
7.9280e-01, 8.6041e-01, 4.6701e-01, 9.2852e-01, 1.4821e-01, 4.5410e-01, 1.3074e-01, 6.2210e-01, 4.9024e-01, 1.1466e-01, 4.4154e-01, 2.9868e-01,
7.2758e-01, 2.0766e-01, 7.5508e-01, 9.6522e-01, 4.3688e-01, 2.2823e-01, 4.7394e-03, 2.9342e-01, 4.3083e-01, 4.1902e-01, 1.4416e-01, 3.6500e-01,
3.0560e-01, 6.5350e-01, 1.2115e-01, 8.7206e-01, 7.4081e-01, 4.2203e-01, 5.9372e-01, 3.1230e-01, 9.1979e-01, 2.7486e-02, 5.3383e-01, 4.6224e-01,
7.5211e-01, 3.6094e-01, 4.7034e-01, 1.7355e-01, 8.9763e-01, 7.6165e-01, 3.7557e-01, 2.4157e-01, 2.9074e-01, 3.2458e-01, 4.3049e-01, 5.7171e-01,
7.3509e-02, 3.6087e-02, 5.5341e-01, 4.3993e-01, 9.2601e-01, 6.5248e-01, 3.0640e-01, 4.5727e-01, 9.2104e-01, 8.2688e-01, 2.4243e-01, 7.4937e-01,
8.9448e-01, 7.0981e-01, 8.0783e-01, 4.7065e-01, 5.7154e-01, 7.2319e-01, 4.6777e-01, 5.0465e-01])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_tensor_str_bf16(self):
    """A bfloat16 Tensor prints with dtype=bfloat16 and aligned columns."""
    paddle.disable_static(paddle.CPUPlace())
    a = paddle.to_tensor([[1.5, 1.0], [0, 0]])
    a = paddle.cast(a, dtype=core.VarDesc.VarType.BF16)
    paddle.set_printoptions(precision=4)
    a_str = str(a)

    expected = '''Tensor(shape=[2, 2], dtype=bfloat16, place=Place(cpu), stop_gradient=True,
[[1.5000, 1. ],
[0. , 0. ]])'''

    self.assertEqual(a_str, expected)
    paddle.enable_static()
def test_print_tensor_dtype(self):
    """str() of a Tensor's dtype renders as 'paddle.<dtype>'."""
    paddle.disable_static(paddle.CPUPlace())
    tensor = paddle.rand([1])
    rendered = str(tensor.dtype)
    self.assertEqual(rendered, 'paddle.float32')
    paddle.enable_static()
class TestVarBaseSetitem(unittest.TestCase):
    """__setitem__ coverage for int32 tensors (subclasses override dtype).

    Also verifies that each assignment bumps inplace_version (asserted only
    outside eager mode) and that the tensor object is never reallocated
    (its id() must stay constant).
    """

    def setUp(self):
        self.set_dtype()
        # A (4, 2, 3) ones tensor; each test writes a (2, 3) value into it.
        self.tensor_x = paddle.to_tensor(np.ones((4, 2, 3)).astype(self.dtype))
        self.np_value = np.random.random((2, 3)).astype(self.dtype)
        self.tensor_value = paddle.to_tensor(self.np_value)

    def set_dtype(self):
        # Overridden by subclasses to vary the element type.
        self.dtype = "int32"

    def _test(self, value):
        # value may be a paddle tensor, a numpy array, or a Python scalar.
        if not _in_eager_mode():
            self.assertEqual(self.tensor_x.inplace_version, 0)

        id_origin = id(self.tensor_x)
        self.tensor_x[0] = value
        if not _in_eager_mode():
            self.assertEqual(self.tensor_x.inplace_version, 1)

        # A scalar assignment broadcasts over the selected (2, 3) slice.
        if isinstance(value, (six.integer_types, float)):
            result = np.zeros((2, 3)).astype(self.dtype) + value

        else:
            result = self.np_value

        self.assertTrue(np.array_equal(self.tensor_x[0].numpy(), result))
        self.assertEqual(id_origin, id(self.tensor_x))

        self.tensor_x[1:2] = value
        if not _in_eager_mode():
            self.assertEqual(self.tensor_x.inplace_version, 2)
        self.assertTrue(np.array_equal(self.tensor_x[1].numpy(), result))
        self.assertEqual(id_origin, id(self.tensor_x))

        # Ellipsis assignment fills the whole tensor; spot-check row 3.
        self.tensor_x[...] = value
        if not _in_eager_mode():
            self.assertEqual(self.tensor_x.inplace_version, 3)
        self.assertTrue(np.array_equal(self.tensor_x[3].numpy(), result))
        self.assertEqual(id_origin, id(self.tensor_x))

    def func_test_value_tensor(self):
        self._test(self.tensor_value)

    def test_value_tensor(self):
        # Run once in eager mode and once in legacy mode, re-running setUp
        # each time so the target tensor starts fresh.
        with _test_eager_guard():
            self.setUp()
            self.func_test_value_tensor()
        self.setUp()
        self.func_test_value_tensor()

    def func_test_value_numpy(self):
        self._test(self.np_value)

    def test_value_numpy(self):
        with _test_eager_guard():
            self.setUp()
            self.func_test_value_numpy()
        self.setUp()
        self.func_test_value_numpy()

    def func_test_value_int(self):
        self._test(10)

    def test_value_int(self):
        with _test_eager_guard():
            self.setUp()
            self.func_test_value_int()
        self.setUp()
        self.func_test_value_int()
class TestVarBaseSetitemInt64(TestVarBaseSetitem):
    """Same __setitem__ suite with int64 elements."""

    def set_dtype(self):
        self.dtype = "int64"
class TestVarBaseSetitemFp32(TestVarBaseSetitem):
    """Same __setitem__ suite with float32 elements, plus a float value."""

    def set_dtype(self):
        self.dtype = "float32"

    def test_value_float(self):
        # NOTE(review): unlike the inherited tests, this runs in only one
        # mode (no _test_eager_guard pass) — confirm that is intentional.
        paddle.disable_static()
        self._test(3.3)
class TestVarBaseSetitemFp64(TestVarBaseSetitem):
    """Same __setitem__ suite with float64 elements."""

    def set_dtype(self):
        self.dtype = "float64"
class TestVarBaseInplaceVersion(unittest.TestCase):
    """Each in-place mutation of a Tensor increments its inplace_version."""

    def test_setitem(self):
        # Both integer and slice __setitem__ count as in-place mutations.
        paddle.disable_static()
        tensor = paddle.ones(shape=[4, 2, 3], dtype="float32")
        self.assertEqual(tensor.inplace_version, 0)

        tensor[1] = 1
        self.assertEqual(tensor.inplace_version, 1)

        tensor[1:2] = 1
        self.assertEqual(tensor.inplace_version, 2)

    def test_bump_inplace_version(self):
        # _bump_inplace_version advances the counter without touching data.
        paddle.disable_static()
        tensor = paddle.ones(shape=[4, 2, 3], dtype="float32")
        self.assertEqual(tensor.inplace_version, 0)

        tensor._bump_inplace_version()
        self.assertEqual(tensor.inplace_version, 1)

        tensor._bump_inplace_version()
        self.assertEqual(tensor.inplace_version, 2)
class TestVarBaseSlice(unittest.TestCase):
    """Tensor._slice(begin, end) should match numpy slicing on axis 0."""

    def test_slice(self):
        paddle.disable_static()
        np_x = np.random.random((3, 8, 8))
        x = paddle.to_tensor(np_x, dtype="float64")
        actual_x = x._slice(0, 1)
        actual_x = paddle.to_tensor(actual_x)
        # NOTE(review): ndarray.all() == ndarray.all() compares two single
        # booleans, a very weak check — consider np.array_equal instead.
        self.assertEqual(actual_x.numpy().all(), np_x[0:1].all())
class TestVarBaseClear(unittest.TestCase):
    """_clear releases a Tensor's storage; it then prints as uninitialized."""

    def test_clear(self):
        paddle.disable_static()
        source = np.random.random((3, 8, 8))
        tensor = paddle.to_tensor(source, dtype="float64")
        tensor._clear()
        self.assertEqual(str(tensor), "Tensor(Not initialized)")
class TestVarBaseOffset(unittest.TestCase):
    """_offset reports a sliced tensor's element offset into its storage."""

    def test_offset(self):
        paddle.disable_static()
        base = paddle.to_tensor(np.random.random((3, 8, 8)), dtype="float64")
        expected_offset = 0
        # A slice starting at row 0 keeps offset 0 into the shared buffer.
        view = paddle.to_tensor(base._slice(expected_offset, 1))
        self.assertEqual(view._offset(), expected_offset)
class TestVarBaseShareBufferTo(unittest.TestCase):
    """_share_buffer_to makes two VarBases share a single allocation."""

    def test_share_buffer_To(self):
        paddle.disable_static()
        source_values = np.random.random((3, 8, 8))
        src = paddle.to_tensor(source_values, dtype="float64")
        dst = core.VarBase()  # empty var: no holder until sharing happens
        src._share_buffer_to(dst)
        self.assertEqual(src._is_shared_buffer_with(dst), True)
class TestVarBaseTo(unittest.TestCase):
    """Tensor._to(dtype=..., device=..., blocking=...) conversion tests."""

    def setUp(self):
        paddle.disable_static()
        self.np_x = np.random.random((3, 8, 8))
        self.x = paddle.to_tensor(self.np_x, dtype="float32")

    def test_to_api(self):
        # dtype-only conversion: values preserved, dtype widened to FP64.
        x_double = self.x._to(dtype='double')
        self.assertEqual(x_double.dtype, paddle.fluid.core.VarDesc.VarType.FP64)
        self.assertTrue(np.allclose(self.np_x, x_double))

        # NOTE(review): self.x was created as float32, yet this asserts FP64
        # on self.x itself after a no-op _to(); that only holds if the
        # earlier _to(dtype='double') mutated self.x in place — confirm.
        x_ = self.x._to()
        self.assertEqual(self.x.dtype, paddle.fluid.core.VarDesc.VarType.FP64)
        self.assertTrue(np.allclose(self.np_x, x_))

        if paddle.fluid.is_compiled_with_cuda():
            # Device moves, given both as Place objects and as strings,
            # optionally combined with a dtype change.
            x_gpu = self.x._to(device=paddle.CUDAPlace(0))
            self.assertTrue(x_gpu.place.is_gpu_place())
            self.assertEqual(x_gpu.place.gpu_device_id(), 0)

            x_gpu0 = self.x._to(device='gpu:0')
            self.assertTrue(x_gpu0.place.is_gpu_place())
            self.assertEqual(x_gpu0.place.gpu_device_id(), 0)

            x_gpu1 = self.x._to(device='gpu:0', dtype="float64")
            self.assertTrue(x_gpu1.place.is_gpu_place())
            self.assertEqual(x_gpu1.place.gpu_device_id(), 0)
            self.assertEqual(x_gpu1.dtype,
                             paddle.fluid.core.VarDesc.VarType.FP64)

            x_gpu2 = self.x._to(device='gpu:0', dtype="float16")
            self.assertTrue(x_gpu2.place.is_gpu_place())
            self.assertEqual(x_gpu2.place.gpu_device_id(), 0)
            self.assertEqual(x_gpu2.dtype,
                             paddle.fluid.core.VarDesc.VarType.FP16)

        x_cpu = self.x._to(device=paddle.CPUPlace())
        self.assertTrue(x_cpu.place.is_cpu_place())

        x_cpu0 = self.x._to(device='cpu')
        self.assertTrue(x_cpu0.place.is_cpu_place())

        x_cpu1 = self.x._to(device=paddle.CPUPlace(), dtype="float64")
        self.assertTrue(x_cpu1.place.is_cpu_place())
        self.assertEqual(x_cpu1.dtype, paddle.fluid.core.VarDesc.VarType.FP64)

        x_cpu2 = self.x._to(device='cpu', dtype="float16")
        self.assertTrue(x_cpu2.place.is_cpu_place())
        self.assertEqual(x_cpu2.dtype, paddle.fluid.core.VarDesc.VarType.FP16)

        # Invalid arguments: a non-Place/str device and a non-bool blocking.
        self.assertRaises(ValueError, self.x._to, device=1)
        self.assertRaises(AssertionError, self.x._to, blocking=1)
class TestVarBaseInitVarBaseFromTensorWithDevice(unittest.TestCase):
    """Constructing core.VarBase from an LoD Tensor with an explicit place."""

    def test_varbase_init(self):
        paddle.disable_static()
        t = fluid.Tensor()
        np_x = np.random.random((3, 8, 8))
        t.set(np_x, fluid.CPUPlace())

        if paddle.fluid.is_compiled_with_cuda():
            device = paddle.CUDAPlace(0)
            tmp = fluid.core.VarBase(t, device)
            self.assertTrue(tmp.place.is_gpu_place())
            # NOTE(review): ndarray.all() == ndarray.all() compares two
            # booleans only — consider np.array_equal for a real check.
            self.assertEqual(tmp.numpy().all(), np_x.all())

        device = paddle.CPUPlace()
        tmp = fluid.core.VarBase(t, device)
        self.assertEqual(tmp.numpy().all(), np_x.all())
class TestVarBaseNumel(unittest.TestCase):
    """Tests for Tensor._numel()."""

    def test_numel_normal(self):
        # _numel() must equal the product of the shape's extents.
        paddle.disable_static()
        np_x = np.random.random((3, 8, 8))
        x = paddle.to_tensor(np_x, dtype="float64")
        x_actual_numel = x._numel()
        # np.prod replaces np.product, a deprecated alias that was removed
        # in NumPy 2.0; the result is identical.
        x_expected_numel = np.prod((3, 8, 8))
        self.assertEqual(x_actual_numel, x_expected_numel)

    def test_numel_without_holder(self):
        # A VarBase with no allocated holder reports zero elements.
        paddle.disable_static()
        x_without_holder = core.VarBase()
        x_actual_numel = x_without_holder._numel()
        self.assertEqual(x_actual_numel, 0)
class TestVarBaseCopyGradientFrom(unittest.TestCase):
    """_copy_gradient_from overwrites x.grad with another tensor's values."""

    def test_copy_gradient_from(self):
        paddle.disable_static()
        np_x = np.random.random((2, 2))
        np_y = np.random.random((2, 2))
        x = paddle.to_tensor(np_x, dtype="float64", stop_gradient=False)
        y = paddle.to_tensor(np_y, dtype="float64")
        # Produce a gradient on x first, then overwrite it from y.
        out = x + x
        out.backward()
        x._copy_gradient_from(y)
        # NOTE(review): .all() == .all() only compares two booleans; use
        # np.array_equal(x.grad.numpy(), np_y) for an element-wise check.
        self.assertEqual(x.grad.numpy().all(), np_y.all())
if __name__ == '__main__':
    # Start in static-graph mode; individual tests switch to dygraph
    # themselves via paddle.disable_static() / fluid.dygraph.guard().
    paddle.enable_static()
    unittest.main()
| 42.105603 | 151 | 0.570951 |
8363e61526ce20b4aa92f9a5bd946030ec43186d | 4,716 | py | Python | MultiverseClient/Scripts/SoundSource.py | dmacka/MultiverseClientServer | b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379 | [
"MIT"
] | 5 | 2020-04-29T19:14:57.000Z | 2022-02-18T08:48:37.000Z | MultiverseClient/Scripts/SoundSource.py | dmacka/MultiverseClientServer | b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379 | [
"MIT"
] | null | null | null | MultiverseClient/Scripts/SoundSource.py | dmacka/MultiverseClientServer | b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379 | [
"MIT"
] | 2 | 2021-03-09T06:53:30.000Z | 2021-03-27T12:02:39.000Z | #
#
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
import ClientAPI
class SoundSource:
    """Wrapper that exposes a native sound source's properties as attributes.

    Attribute reads/writes are routed through the _getters/_setters tables
    to the underlying native object stored in self._soundSource.  Instances
    are never constructed directly: all sources are allocated by the
    SoundManager and wrapped via _ExistingSoundSource.
    """

    def __init__(self):
        # Direct construction is unsupported; use _ExistingSoundSource.
        assert False

    #
    # Property Getters
    #
    def _get_Position(self):
        return self._soundSource.Position

    def _get_Looping(self):
        return self._soundSource.Looping

    def _get_Ambient(self):
        # Bug fix: this previously read 'self._soundSouce' (missing 'r'),
        # so every access to the Ambient property raised AttributeError.
        return self._soundSource.Ambient

    def _get_Gain(self):
        return self._soundSource.Gain

    def _get_SoundFile(self):
        return self._soundSource.SoundFile

    def _get_MaxAttenuationDistance(self):
        return self._soundSource.MaxAttenuationDistance

    def _get_MinAttenuationDistance(self):
        return self._soundSource.MinAttenuationDistance

    def _get_LinearAttenuation(self):
        return self._soundSource.LinearAttenuation

    def _get_Name(self):
        return self._soundSource.Name

    def __getattr__(self, attrname):
        # Only called when normal lookup fails; dispatch via the getter table.
        if attrname in self._getters:
            return self._getters[attrname](self)
        else:
            # Parenthesized raise is valid in both Python 2 and 3; the
            # original 'raise AttributeError, attrname' is a 3.x syntax error.
            raise AttributeError(attrname)

    #
    # Property Setters
    #
    def _set_Position(self, pos):
        self._soundSource.Position = pos

    def _set_Looping(self, looping):
        self._soundSource.Looping = looping

    def _set_Ambient(self, ambient):
        self._soundSource.Ambient = ambient

    def _set_Gain(self, gain):
        self._soundSource.Gain = gain

    def _set_SoundFile(self, soundfile):
        self._soundSource.SoundFile = soundfile

    def _set_MinAttenuationDistance(self, min):
        self._soundSource.MinAttenuationDistance = min

    def _set_MaxAttenuationDistance(self, max):
        self._soundSource.MaxAttenuationDistance = max

    def _set_LinearAttenuation(self, linear):
        self._soundSource.LinearAttenuation = linear

    def __setattr__(self, attrname, value):
        if attrname in self._setters:
            self._setters[attrname](self, value)
        else:
            raise AttributeError(attrname)

    # Dispatch tables; 'Name' is read-only (present only in _getters).
    _getters = { 'Position': _get_Position, 'Looping': _get_Looping, 'Ambient': _get_Ambient, 'Gain': _get_Gain, 'SoundFile': _get_SoundFile, 'MinAttenuationDistance': _get_MinAttenuationDistance, 'MaxAttenuationDistance': _get_MaxAttenuationDistance, 'LinearAttenuation' : _get_LinearAttenuation, 'Name' : _get_Name }
    _setters = { 'Position': _set_Position, 'Looping': _set_Looping, 'Ambient': _set_Ambient, 'Gain': _set_Gain, 'SoundFile': _set_SoundFile, 'MinAttenuationDistance': _set_MinAttenuationDistance, 'MaxAttenuationDistance': _set_MaxAttenuationDistance, 'LinearAttenuation' : _set_LinearAttenuation }

    #
    # Methods
    #
    def Stop(self):
        self._soundSource.Stop()

    def Play(self):
        self._soundSource.Play()

    def Remove(self, name):
        self._soundSource.Remove(name)
#
# This class is just another way of making a SoundSource, with a different
# constructor, since we don't have constructor overloading within a single
# class. It should only be used internally by the API.
#
# Since SoundSources are all allocated by the SoundManager, this is the only
# way to make a SoundSource.
#
class _ExistingSoundSource(SoundSource):
    """SoundSource wrapper built around an already-allocated native source.

    This is the sole supported way to create a SoundSource; the native
    object comes from the SoundManager.
    """

    #
    # Constructor
    #
    def __init__(self, soundSource):
        # Write through __dict__ to bypass SoundSource.__setattr__, which
        # rejects any name not listed in _setters.
        self.__dict__['_soundSource'] = soundSource

    def __setattr__(self, attrname, value):
        # Delegate attribute writes to the base class's setter dispatch.
        SoundSource.__setattr__(self, attrname, value)
| 34.173913 | 318 | 0.692748 |
e31cf8348806c3d7956f8f1f14f90d9032a86f55 | 11,382 | py | Python | vgg16/50/mytest.py | aiiuii/AutoPruner | 93627070ed08d00160bad14e20660cddc6ff0e8d | [
"MIT"
] | 19 | 2020-08-09T15:08:41.000Z | 2021-12-28T06:46:54.000Z | vgg16/50/mytest.py | aiiuii/AutoPruner | 93627070ed08d00160bad14e20660cddc6ff0e8d | [
"MIT"
] | 1 | 2021-07-22T05:51:19.000Z | 2021-07-22T05:51:19.000Z | vgg16/50/mytest.py | aiiuii/AutoPruner | 93627070ed08d00160bad14e20660cddc6ff0e8d | [
"MIT"
] | 5 | 2020-10-21T08:47:13.000Z | 2022-02-11T11:02:51.000Z | # ************************************************************
# Author : Bumsoo Kim, 2017
# Github : https://github.com/meliketoy/fine-tuning.pytorch
#
# Korea University, Data-Mining Lab
# Deep Convolutional Network Fine tuning Implementation
#
# Description : main.py
# The main code for training classification networks.
# ***********************************************************
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import time
import os
import sys
import argparse
import numpy as np
import shutil
import math
from src_code import Network_FT
from src_code.lmdbdataset import lmdbDataset
# Command-line configuration for fine-tuning one pruned VGG16 layer.
parser = argparse.ArgumentParser(description='PyTorch Digital Mammography Training')
parser.add_argument('--lr', default=1e-3, type=float, help='learning rate')
parser.add_argument('--weight_decay', default=5e-4, type=float, help='weight decay')
parser.add_argument('--batch_size', default=256, type=int, help='batch size')
parser.add_argument('--num_epochs', default=2, type=int, help='number of training epochs')
parser.add_argument('--lr_decay_epoch', default=10, type=int, help='learning rate decay epoch')
parser.add_argument('--data_base', default='/data/zhangcl/ImageNet', type=str, help='the path of dataset')
parser.add_argument('--ft_model_path', default='/home/luojh2/.torch/models/vgg16-397923af.pth',
                    type=str, help='the path of fine tuned model')
parser.add_argument('--gpu_id', default='4,5,6,7', type=str,
                    help='id(s) for CUDA_VISIBLE_DEVICES')
parser.add_argument('--layer_id', default=7, type=int, help='the id of compressed layer, starting from 0')
parser.add_argument('--start_epoch', default=0, type=int, metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('--compression_rate', default=0.2, type=float, help='the percentage of 1 in compressed model')
parser.add_argument('--channel_index_range', default=20, type=int, help='the range to calculate channel index')
parser.add_argument('--print-freq', '-p', default=20, type=int,
                    metavar='N', help='print frequency (default: 10)')
args = parser.parse_args()
# Restrict visible GPUs before any CUDA context is created.
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id
best_prec1 = -1  # best top-1 validation accuracy so far (updated in main)
print(args)
def main():
    """Fine-tune the pruned VGG16 network for the configured layer.

    Loads ImageNet train/val LMDBs, builds the network for args.layer_id,
    loads a previous checkpoint, then trains with a sparsity-regularized
    loss and saves the best weights and channel index to disk.
    """
    global args, best_prec1

    # Phase 1 : Data Upload
    print('\n[Phase 1] : Data Preperation')
    train_loader = torch.utils.data.DataLoader(
        lmdbDataset(os.path.join(args.data_base, 'ILSVRC-train.lmdb'), True),
        batch_size=args.batch_size,
        shuffle=True,
        num_workers=16,
        pin_memory=True)
    val_loader = torch.utils.data.DataLoader(
        lmdbDataset(os.path.join(args.data_base, 'ILSVRC-val.lmdb'), False),
        batch_size=args.batch_size,
        num_workers=16,
        pin_memory=True)
    print('data_loader_success!')

    # Phase 2 : Model setup
    print('\n[Phase 2] : Model setup')
    if args.layer_id == 0:
        # For the first layer, snapshot the pretrained full model once so
        # later layers can start from 'checkpoint/model.pth'.
        model_ft = Network_FT.Vgg16(args.ft_model_path).cuda()
        model_ft = torch.nn.DataParallel(model_ft)
        model_param = model_ft.state_dict()
        torch.save(model_param, 'checkpoint/model.pth')

    model_ft = Network_FT.NetworkNew(args.layer_id).cuda()
    # NOTE(review): the checkpoint path is hard-coded to layer_7 while the
    # rest of this script keys everything on args.layer_id — confirm this
    # is intentional (this file appears to be a layer-7 test driver).
    weight = torch.load('checkpoint/layer_7/model.pth')
    model_ft = torch.nn.DataParallel(model_ft)
    model_ft.load_state_dict(weight)
    cudnn.benchmark = True
    print("model setup success!")

    # Phase 3: fine_tune model
    print('\n[Phase 3] : Model fine tune')
    # define loss function (criterion) and optimizer
    criterion = nn.CrossEntropyLoss().cuda()
    # Only parameters with requires_grad are optimized (frozen layers skip).
    optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, model_ft.parameters()), args.lr,
                                momentum=0.9,
                                weight_decay=args.weight_decay)
    # Initial sigmoid sharpness for the channel scaling; grown during train().
    scale_factor = 9.0
    for epoch in range(args.start_epoch, args.num_epochs):
        adjust_learning_rate(optimizer, epoch, 1)

        # train for one epoch
        channel_index, scale_factor = train(train_loader, model_ft, criterion, optimizer, epoch, scale_factor)

        # evaluate on validation set
        prec1 = validate(val_loader, model_ft, criterion, channel_index)

        # remember best prec@1 and save checkpoint
        is_best = prec1 > best_prec1
        if is_best:
            best_prec1 = prec1
            folder_path = 'checkpoint/layer_' + str(args.layer_id)
            if not os.path.exists(folder_path):
                os.makedirs(folder_path)
            torch.save(model_ft.state_dict(), folder_path+'/model.pth')
            torch.save(channel_index, folder_path+'/channel_index.pth')
def train(train_loader, model, criterion, optimizer, epoch, scale_factor):
    """Run one training epoch of the channel-pruning fine-tune.

    Besides the cross-entropy loss, a regularizer pushes the mean L1 norm of
    the channel-scaling vector toward ``1 - args.compression_rate``.  Every
    ``args.channel_index_range`` batches the collected scaling vectors are
    binarized into a 0/1 ``channel_index`` and ``reg_lambda`` is re-balanced
    from the achieved pruning rate.

    :param train_loader: iterable of (input, target) batches
    :param model: network returning (logits, channel scaling vector)
    :param criterion: classification loss
    :param optimizer: optimizer over the trainable parameters
    :param epoch: current epoch index (logging only)
    :param scale_factor: current sigmoid sharpening factor, grown each batch
    :returns: (channel_index, scale_factor)
    """
    gpu_num = torch.cuda.device_count()
    # grow scale_factor by 100x over the whole fine-tuning run
    scale_factor_mul = math.pow(100, 1.0/(args.num_epochs*len(train_loader)))
    reg_lambda = 100
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # switch to train mode
    model.train()
    channel_index_list = list()
    channel_index = 0
    end = time.time()
    for i, (input, target) in enumerate(train_loader):
        scale_factor = scale_factor * scale_factor_mul
        # measure data loading time
        data_time.update(time.time() - end)
        # `async` became a reserved keyword in Python 3.7; the PyTorch
        # replacement argument is `non_blocking`.
        target = target.cuda(non_blocking=True)
        input_var = torch.autograd.Variable(input).cuda()
        target_var = torch.autograd.Variable(target).cuda()
        # compute output
        output, scale_vec = model(input_var, scale_factor)
        loss = criterion(output, target_var)
        loss = loss + float(reg_lambda) * (
            scale_vec.norm(1) / float(scale_vec.size(0)) - args.compression_rate) ** 2
        # compute channel index (average the per-GPU replicas of the vector)
        tmp = scale_vec.data.cpu().numpy().reshape(gpu_num, -1).mean(0)
        channel_index_list.append(tmp.copy())
        if len(channel_index_list) == args.channel_index_range:
            channel_index_list = np.array(channel_index_list)
            tmp = channel_index_list[0, :]
            print('first 5 values: [{0:.6f}, {1:.6f}, {2:.6f}, {3:.6f}, {4:.6f}]'.format(tmp[0], tmp[1], tmp[2], tmp[3],
                                                                                         tmp[4]))
            tmp2 = channel_index_list.sum(axis=0)
            tmp2 = tmp2 / args.channel_index_range
            for tmp_i in range(len(channel_index_list)):
                channel_index_list[tmp_i] = (np.sign(channel_index_list[tmp_i] - 0.5) + 1) / 2.0
            tmp = channel_index_list.sum(axis=0)
            tmp = tmp / args.channel_index_range
            channel_index = (np.sign(tmp - 0.5) + 1) / 2.0 # to 0-1 binary
            real_pruning_rate = 100.0 * np.sum(tmp2 < 10**-6) / len(tmp2)
            binary_pruning_rate = 100.0 * np.sum(channel_index < 10**-6) / len(channel_index)
            tmp[tmp == 0] = 1
            channel_inconsistency = 100.0 * np.sum(tmp != 1) / len(tmp)
            print("pruning rate (real/binary): {0:.4f}%/{1:.4f}%, index inconsistency: {2:.4f}%".format(
                real_pruning_rate, binary_pruning_rate, channel_inconsistency))
            channel_index_list = list()
            # re-balance the regularizer from the distance to the target rate
            reg_lambda = 100.0 * np.abs(binary_pruning_rate/100.0 - 1 + args.compression_rate)
            sys.stdout.flush()
        # measure accuracy and record loss
        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
        # 0-dim tensor indexing (`loss.data[0]`) was removed in PyTorch >= 0.5;
        # `.item()` extracts the Python scalar.
        losses.update(loss.item(), input.size(0))
        top1.update(prec1[0], input.size(0))
        top5.update(prec5[0], input.size(0))
        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if i % args.print_freq == 0:
            print('Epoch[{0}]: [{1}/{2}]\t'
                  'scale_factor: {3:.4f}\t'
                  'reg_lambda: {4:.4f}\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                      epoch, i, len(train_loader), scale_factor, reg_lambda, batch_time=batch_time,
                      top1=top1, top5=top5))
            sys.stdout.flush()
    return channel_index, scale_factor
def validate(val_loader, model, criterion, channel_index):
    """Evaluate the model with the fixed binary ``channel_index`` applied.

    :param val_loader: iterable of (input, target) batches
    :param model: network returning (logits, channel scaling vector)
    :param criterion: classification loss
    :param channel_index: 0/1 channel mask produced by :func:`train`
    :returns: average top-1 accuracy over the validation set
    """
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # switch to evaluate mode
    model.eval()
    end = time.time()
    # `volatile=True` was removed from torch.autograd; torch.no_grad() is the
    # supported way to disable gradient tracking during evaluation.
    with torch.no_grad():
        for i, (input, target) in enumerate(val_loader):
            # `async` became a reserved keyword in Python 3.7; use `non_blocking`.
            target = target.cuda(non_blocking=True)
            input_var = torch.autograd.Variable(input)
            target_var = torch.autograd.Variable(target)
            # compute output
            output, _ = model(input_var, 1.0, channel_index)
            loss = criterion(output, target_var)
            # measure accuracy and record loss
            prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
            # `.item()` replaces the removed 0-dim indexing `loss.data[0]`
            losses.update(loss.item(), input.size(0))
            top1.update(prec1[0], input.size(0))
            top5.update(prec5[0], input.size(0))
            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()
            if i % args.print_freq == 0:
                print('Test: [{0}/{1}]\t'
                      'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                      'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                      'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                      'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                          i, len(val_loader), batch_time=batch_time, loss=losses,
                          top1=top1, top5=top5))
                sys.stdout.flush()
    print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
          .format(top1=top1, top5=top5))
    return top1.avg
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
    """Serialize ``state`` to ``filename``; mirror to model_best.pth.tar when best."""
    torch.save(state, filename)
    if not is_best:
        return
    shutil.copyfile(filename, 'model_best.pth.tar')
class AverageMeter(object):
    """Computes and stores the average and current value."""

    def __init__(self):
        # delegate to reset() instead of duplicating the field initialization
        self.reset()

    def reset(self):
        """Clear all running statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
def adjust_learning_rate(optimizer, epoch, epoch_num):
    """Set the learning rate to the initial LR decayed 10x every `epoch_num` epochs."""
    # NOTE(review): decay period is `epoch_num` (the caller passes 1, i.e.
    # decay every epoch), not the 30 epochs the original comment implied.
    lr = args.lr * (0.1 ** (epoch // epoch_num))
    print('| Learning Rate = %f' % lr)
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
# Script entry point.
if __name__ == "__main__":
    main()
| 38.846416 | 120 | 0.613073 |
3345486131823fdf5062d784ce2823a262e10bca | 334 | py | Python | frappe_testing/config/docs.py | Abadulrehman/frappe_testing | 3aea7f68da4d567eeb3e76bf8f2656c29a61940d | [
"MIT"
] | 1 | 2021-12-15T06:04:34.000Z | 2021-12-15T06:04:34.000Z | frappe_testing/config/docs.py | Abadulrehman/frappe_testing | 3aea7f68da4d567eeb3e76bf8f2656c29a61940d | [
"MIT"
] | null | null | null | frappe_testing/config/docs.py | Abadulrehman/frappe_testing | 3aea7f68da4d567eeb3e76bf8f2656c29a61940d | [
"MIT"
] | null | null | null | """
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/frappe_testing"
# docs_base_url = "https://[org_name].github.io/frappe_testing"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
    """Populate the docs rendering context with the brand label."""
    setattr(context, "brand_html", "Frappe Testing")
| 27.833333 | 68 | 0.736527 |
8af78292e6562a9e8ea2a7c77104d877cf219543 | 1,554 | py | Python | testing/scripts/e2e_utils/install.py | glindsell/seldon-core | a6992832b74ed71bbd3a91c48b5a79a5cee785b2 | [
"Apache-2.0"
] | 1 | 2020-07-14T15:42:41.000Z | 2020-07-14T15:42:41.000Z | testing/scripts/e2e_utils/install.py | glindsell/seldon-core | a6992832b74ed71bbd3a91c48b5a79a5cee785b2 | [
"Apache-2.0"
] | 231 | 2020-08-10T08:38:42.000Z | 2021-08-02T20:56:49.000Z | testing/scripts/e2e_utils/install.py | glindsell/seldon-core | a6992832b74ed71bbd3a91c48b5a79a5cee785b2 | [
"Apache-2.0"
] | null | null | null | import os
from sh import helm, kubectl
# Repository root: four directory levels above this file.
SC_ROOT_PATH = os.path.abspath(
    os.path.join(
        __file__, os.path.pardir, os.path.pardir, os.path.pardir, os.path.pardir
    )
)
HELM_CHARTS_PATH = os.path.join(SC_ROOT_PATH, "helm-charts")  # local chart sources
SC_NAME = "seldon"  # default Helm release name
SC_NAMESPACE = "seldon-system"  # default install namespace
def install_seldon(name=SC_NAME, namespace=SC_NAMESPACE, executor=True, version=None):
    """Install the seldon-core-operator Helm chart.

    A pinned ``version`` installs the published chart from the ``seldonio``
    repository; with no version the local chart in this checkout is used.
    """
    if version is None:
        # Use local
        chart = os.path.join(HELM_CHARTS_PATH, "seldon-core-operator")
    else:
        chart = "seldonio/seldon-core-operator"
    overrides = {
        "istio.enabled": "true",
        "istio.gateway": "istio-system/seldon-gateway",
        "certManager.enabled": "false",
    }
    if not executor:
        overrides["executor.enabled"] = "false"
    helm.install(
        name,
        chart,
        _to_helm_values_list(overrides),
        namespace=namespace,
        version=version,
        wait=True,
    )
def delete_seldon(name=SC_NAME, namespace=SC_NAMESPACE):
    """Uninstall the Helm release and clean up the SeldonDeployment CRD."""
    helm.delete(name, namespace=namespace)
    # Helm 3.0.3 doesn't delete CRDs
    kubectl.delete(
        "crd",
        "seldondeployments.machinelearning.seldon.io",
        ignore_not_found=True,
    )
def _to_helm_values_list(values):
"""
The sh lib doesn't allow you to specify multiple instances of the same
kwarg. https://github.com/amoffat/sh/issues/529
The best option is to concatenate them into a list.
"""
values_list = []
for key, val in values.items():
values_list += ["--set", f"{key}={val}"]
return values_list
| 25.064516 | 86 | 0.65444 |
8c5f0ae49901556a71833bab35ed08f4d7ed6797 | 2,786 | py | Python | code/deeprt/py/peprt/trainer.py | omidroshani/DeepDIA | 2af96056a62a49ff6ff10b6e176f0fba3f495843 | [
"BSD-3-Clause"
] | 25 | 2020-01-09T18:30:57.000Z | 2021-12-25T20:09:40.000Z | code/deeprt/py/peprt/trainer.py | omidroshani/DeepDIA | 2af96056a62a49ff6ff10b6e176f0fba3f495843 | [
"BSD-3-Clause"
] | 2 | 2020-10-19T14:28:41.000Z | 2021-11-15T18:46:15.000Z | code/deeprt/py/peprt/trainer.py | omidroshani/DeepDIA | 2af96056a62a49ff6ff10b6e176f0fba3f495843 | [
"BSD-3-Clause"
] | 14 | 2020-01-24T14:44:46.000Z | 2022-02-24T12:37:33.000Z | from . import models
import numpy as np
import pandas as pd
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger
from keras.models import load_model
def data_to_tensors(data, rt_min=-50, rt_max=150):
    """Convert a peptide dataframe into model input tensors and normalized RTs."""
    sequences = data[["sequence"]].values.flatten()
    retention = data[["irt"]].values
    inputs = models.peptide_to_tensor(sequences)
    targets = models.normalize(retention, min=rt_min, max=rt_max)
    return inputs, targets
def split_train_validate(x, y, validate_percent=.33, seed=None):
    """Randomly partition (x, y) into train/validate subsets.

    Returns the train arrays, the validate arrays, and the index
    permutations used for each subset.
    """
    n = len(x)
    np.random.seed(seed)
    order = np.random.permutation(n)
    cut = int((1 - validate_percent) * n)
    train_idx, validate_idx = order[:cut], order[cut:]
    return (x[train_idx], y[train_idx],
            x[validate_idx], y[validate_idx],
            train_idx, validate_idx)
class PeptideRTTrainer:
    """Train a peptide retention-time model with early stopping and checkpointing."""

    def __init__(self, model_path=None, model=None, rt_min=-50, rt_max=150, save_path="bestmodel.hdf5", save_best_only=True, log_path='training.log'):
        """Load a model from ``model_path``, use ``model``, or build a new one."""
        if model_path is not None:
            model = load_model(model_path)
        elif model is None:
            model = models.build_model()
        self.model = model
        self.rt_min = rt_min
        self.rt_max = rt_max
        self.save_path = save_path
        self.save_best_only = save_best_only
        self.log_path = log_path

    def get_model(self):
        """Return the underlying Keras model."""
        return self.model

    def save_model(self, path):
        """Persist the model to ``path``."""
        self.model.save(path)

    def train(self, data, epochs=100, patience=15, validate_percent=.33, seed=0):
        """Fit the model on ``data`` and return the split indices and history.

        :param data: dataframe with 'sequence' and 'irt' columns
        :param epochs: maximum number of epochs
        :param patience: early-stopping patience on validation loss
        :param validate_percent: fraction of samples held out for validation
        :param seed: RNG seed for the train/validate split
        :returns: dict with the 'split' description and Keras 'history'
        """
        x, y = data_to_tensors(data, rt_min=self.rt_min, rt_max=self.rt_max)
        # Bug fix: forward the method's validate_percent/seed arguments;
        # previously the literals 0.33 and 0 were always used.
        x_train, y_train, x_validate, y_validate, train_indexs, validate_indexs = split_train_validate(
            x, y, validate_percent=validate_percent, seed=seed)
        split = {
            'validate_percent': validate_percent,
            'seed': seed,
            'train': train_indexs.tolist(),
            'validate': validate_indexs.tolist()
        }
        csvlogger = CSVLogger(self.log_path)
        earlystopper = EarlyStopping(monitor='val_loss', patience=patience, verbose=1)
        if self.save_path is not None:
            checkpointer = ModelCheckpoint(filepath=self.save_path, verbose=1, save_best_only=self.save_best_only)
            callbacks = [checkpointer, csvlogger, earlystopper]
        else:
            callbacks = [csvlogger, earlystopper]
        history = self.model.fit(
            x_train, y_train, epochs=epochs,
            validation_data=(x_validate, y_validate),
            callbacks=callbacks)
        return {
            'split': split,
            'history': history.history
        }
| 35.717949 | 150 | 0.646088 |
3affbff28a7caff056ce24385db9cb96ab7b19b6 | 1,574 | py | Python | sme_financing/main/models/client.py | BuildForSDG/team-214-backend | f1aff9c27d7b7588b4bbb2bc68956b35051d4506 | [
"MIT"
] | 1 | 2020-05-20T16:32:33.000Z | 2020-05-20T16:32:33.000Z | sme_financing/main/models/client.py | BuildForSDG/team-214-backend | f1aff9c27d7b7588b4bbb2bc68956b35051d4506 | [
"MIT"
] | 23 | 2020-05-19T07:12:53.000Z | 2020-06-21T03:57:54.000Z | sme_financing/main/models/client.py | BuildForSDG/team-214-backend | f1aff9c27d7b7588b4bbb2bc68956b35051d4506 | [
"MIT"
] | 1 | 2020-05-18T14:18:12.000Z | 2020-05-18T14:18:12.000Z | from datetime import datetime
from enum import Enum
from .. import db
class EducationLevel(Enum):
    """Supported education levels; the values are the human-readable labels
    persisted by the ``Client.education_level`` column."""
    doctorate_degree = "Doctorate Degree"
    masters_degree = "Masters Degree"
    bachelors_degree = "Bachelors Degree"
    hnd = "HND"
class Client(db.Model):
    """Client Model for storing client related details."""

    __tablename__ = "clients"
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    lastname = db.Column(db.String(50), nullable=False)
    firstname = db.Column(db.String(50), nullable=False)
    gender = db.Column(db.String(20), nullable=True)
    postal_address = db.Column(db.String(255), nullable=True)
    residential_address = db.Column(db.String(255), nullable=True)
    telephone = db.Column(db.String(50), nullable=False)
    nationality = db.Column(db.String(50), nullable=False)
    # Stored by human-readable label (values_callable), not by member name.
    education_level = db.Column(
        db.Enum(EducationLevel, values_callable=lambda obj: [el.value for el in obj]),
        nullable=False,
        default=EducationLevel.bachelors_degree.value,
    )
    position = db.Column(db.String(50), nullable=False)
    # One-to-one link to the authentication user (unique FK).
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"), unique=True)
    user = db.relationship("User", backref="client")
    # A client may own multiple SMEs; lazy="dynamic" exposes a query object.
    smes = db.relationship("SME", backref="client", lazy="dynamic")
    created = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
    updated = db.Column(db.DateTime, onupdate=datetime.utcnow)
    def __repr__(self):
        """Returns this class representation."""
        return f"<Client '{self.lastname} {self.firstname}'>"
133e63b44b555b4763787888feeeb54ec133b168 | 637 | py | Python | nengo_loihi/tests/__init__.py | Michaeljurado24/nengo-loihi | 47a18efcda3324f74493d014b431cfd0e5b9fbe2 | [
"Apache-2.0"
] | null | null | null | nengo_loihi/tests/__init__.py | Michaeljurado24/nengo-loihi | 47a18efcda3324f74493d014b431cfd0e5b9fbe2 | [
"Apache-2.0"
] | null | null | null | nengo_loihi/tests/__init__.py | Michaeljurado24/nengo-loihi | 47a18efcda3324f74493d014b431cfd0e5b9fbe2 | [
"Apache-2.0"
] | null | null | null | import nengo_loihi
def make_test_sim(request):
    """A Simulator factory to be used in tests.

    The factory allows simulator arguments to be controlled via pytest command
    line arguments.  It is used in the ``conftest.Simulator`` fixture, or can
    be passed to the ``nengo_simloader`` option when running the Nengo core
    tests.
    """
    target_opt = request.config.getoption("--target")

    def TestSimulator(net, *args, **kwargs):
        """Simulator constructor to be used in tests"""
        kwargs.setdefault("target", target_opt)
        return nengo_loihi.Simulator(net, *args, **kwargs)

    return TestSimulator
| 28.954545 | 83 | 0.692308 |
13a4c86b450be363fb11fb9dedb49233e33d678c | 12,001 | py | Python | pyNastran/dev/op2_reader/tables/oes_stressStrain/real/oes_triax.py | jtran10/pyNastran | 4aed8e05b91576c2b50ee835f0497a9aad1d2cb0 | [
"BSD-3-Clause"
] | null | null | null | pyNastran/dev/op2_reader/tables/oes_stressStrain/real/oes_triax.py | jtran10/pyNastran | 4aed8e05b91576c2b50ee835f0497a9aad1d2cb0 | [
"BSD-3-Clause"
] | null | null | null | pyNastran/dev/op2_reader/tables/oes_stressStrain/real/oes_triax.py | jtran10/pyNastran | 4aed8e05b91576c2b50ee835f0497a9aad1d2cb0 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import (nested_scopes, generators, division, absolute_import,
print_function, unicode_literals)
from itertools import count
from six import integer_types
import numpy as np
from numpy import zeros, searchsorted, ravel
ints = (int, np.int32)
from pyNastran.op2.tables.oes_stressStrain.real.oes_objects import StressObject, StrainObject, OES_Object
from pyNastran.f06.f06_formatting import write_floats_13e, _eigenvalue_header #, get_key0
try:
import pandas as pd # type: ignore
except ImportError:
pass
class RealTriaxArray(OES_Object):
    """Vectorized OES results container for axisymmetric triangle elements
    (CTRIAX6, element type 53); base class for the stress/strain subclasses.

    ``data`` has shape (ntimes, ntotal, 7) holding
    [radial, azimuthal, axial, shear, omax, oms, ovm].
    """
    def __init__(self, data_code, is_sort1, isubcase, dt):
        OES_Object.__init__(self, data_code, isubcase, apply_data_code=False)
        #self.code = [self.format_code, self.sort_code, self.s_code]
        #self.ntimes = 0 # or frequency/mode
        #self.ntotal = 0
        self.ielement = 0
        self.nelements = 0 # result specific
    @property
    def is_real(self):
        # results are always real-valued for this table
        return True
    @property
    def is_complex(self):
        return False
    def _reset_indices(self):
        # rewind the fill cursors used by add_sort1
        self.itotal = 0
        self.ielement = 0
    def _get_msgs(self):
        # f06 header lines; provided by the stress/strain subclasses
        raise NotImplementedError('%s needs to implement _get_msgs' % self.__class__.__name__)
    def get_headers(self):
        # column labels for self.data; provided by the subclasses
        raise NotImplementedError('%s needs to implement get_headers' % self.__class__.__name__)
        #return headers
    def build(self):
        """sizes the vectorized attributes of the RealTriaxArray"""
        if self.is_built:
            return
        #print("self.ielement =", self.ielement)
        #print('ntimes=%s nelements=%s ntotal=%s' % (self.ntimes, self.nelements, self.ntotal))
        assert self.ntimes > 0, 'ntimes=%s' % self.ntimes
        assert self.nelements > 0, 'nelements=%s' % self.nelements
        assert self.ntotal > 0, 'ntotal=%s' % self.ntotal
        if self.element_type == 53:
            nnodes_per_element = 1
        else:
            raise NotImplementedError(self.element_type)
        self.itime = 0
        self.ielement = 0
        self.itotal = 0
        #self.ntimes = 0
        #self.nelements = 0
        self.is_built = True
        #print("***name=%s type=%s nnodes_per_element=%s ntimes=%s nelements=%s ntotal=%s" % (
        #self.element_name, self.element_type, nnodes_per_element, self.ntimes, self.nelements, self.ntotal))
        # transient tables index time steps by an integer mode/step number
        dtype = 'float32'
        if isinstance(self.nonlinear_factor, integer_types):
            dtype = 'int32'
        self._times = zeros(self.ntimes, dtype=dtype)
        self.element_node = zeros((self.ntotal, 2), dtype='int32')
        # [radial, azimuthal, axial, shear, omax, oms, ovm]
        self.data = zeros((self.ntimes, self.ntotal, 7), dtype='float32')
    def build_dataframe(self):
        """creates a pandas dataframe"""
        # NOTE(review): pd.Panel was removed in pandas >= 1.0; this method
        # only works with old pandas versions -- confirm the supported range.
        headers = self.get_headers()
        element_node = [self.element_node[:, 0], self.element_node[:, 1]]
        if self.nonlinear_factor is not None:
            column_names, column_values = self._build_dataframe_transient_header()
            self.data_frame = pd.Panel(self.data, items=column_values, major_axis=element_node, minor_axis=headers).to_frame()
            self.data_frame.columns.names = column_names
            self.data_frame.index.names = ['ElementID', 'NodeID', 'Item']
        else:
            self.data_frame = pd.Panel(self.data, major_axis=element_node, minor_axis=headers).to_frame()
            self.data_frame.columns.names = ['Static']
            self.data_frame.index.names = ['ElementID', 'NodeID', 'Item']
    def __eq__(self, table):
        """Compare two tables element-wise; raises ValueError with a diff on mismatch."""
        assert self.is_sort1 == table.is_sort1
        self._eq_header(table)
        if not np.array_equal(self.element_node, table.element_node):
            assert self.element_node.shape == table.element_node.shape, 'shape=%s element_node.shape=%s' % (
                self.element_node.shape, table.element_node.shape)
            msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
            msg += '%s\nEid1, Eid2\n' % str(self.code_information())
            for (eid1, nid1), (eid2, nid2) in zip(self.element_node, table.element_node):
                msg += '(%s, %s) (%s, %s)\n' % (eid1, nid1, eid2, nid2)
            print(msg)
            raise ValueError(msg)
        if not np.array_equal(self.data, table.data):
            msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
            msg += '%s\n' % str(self.code_information())
            ntimes = self.data.shape[0]
            i = 0
            if self.is_sort1:
                for itime in range(ntimes):
                    # NOTE(review): iterates self.element, but this class fills
                    # element_node -- presumably supplied elsewhere; verify.
                    for ieid, eid in enumerate(self.element):
                        t1 = self.data[itime, ieid, :]
                        t2 = table.data[itime, ieid, :]
                        (radial1, azimuthal1, axial1, shear1, omax1, oms1, ovm1) = t1
                        (radial2, azimuthal2, axial2, shear2, omax2, oms2, ovm2) = t2
                        if not np.allclose(t1, t2):
                        #if not np.array_equal(t1, t2):
                            msg += '%s\n  (%s, %s, %s, %s, %s, %s, %s)\n  (%s, %s, %s, %s, %s, %s, %s)\n' % (
                                eid,
                                radial1, azimuthal1, axial1, shear1, omax1, oms1, ovm1,
                                radial2, azimuthal2, axial2, shear2, omax2, oms2, ovm2)
                            i += 1
                        if i > 10:
                            print(msg)
                            raise ValueError(msg)
            else:
                raise NotImplementedError(self.is_sort2)
            if i > 0:
                print(msg)
                raise ValueError(msg)
        return True
    def add_sort1(self, dt, eid, nid, radial, azimuthal, axial, shear, omax, oms, ovm):
        """unvectorized method for adding SORT1 transient data"""
        assert isinstance(eid, ints)
        assert isinstance(eid, int) and eid > 0, 'dt=%s eid=%s' % (dt, eid)
        self._times[self.itime] = dt
        self.element_node[self.itotal, :] = [eid, nid]
        self.data[self.itime, self.itotal, :] = [radial, azimuthal, axial, shear, omax, oms, ovm]
        self.itotal += 1
        self.ielement += 1
    def get_stats(self, short=False):
        """Returns summary strings describing the table size and data codes."""
        if not self.is_built:
            return [
                '<%s>\n' % self.__class__.__name__,
                ' ntimes: %i\n' % self.ntimes,
                ' ntotal: %i\n' % self.ntotal,
            ]
        nelements = self.ntotal
        ntimes = self.ntimes
        ntotal = self.ntotal
        nelements = self.ntotal
        msg = []
        if self.nonlinear_factor is not None:  # transient
            msg.append(' type=%s ntimes=%i nelements=%i\n'
                       % (self.__class__.__name__, ntimes, nelements))
            ntimes_word = 'ntimes'
        else:
            msg.append(' type=%s nelements=%i\n'
                       % (self.__class__.__name__, nelements))
            ntimes_word = '1'
        headers = self.get_headers()
        n = len(headers)
        assert n == self.data.shape[2], 'nheaders=%s shape=%s' % (n, str(self.data.shape))
        msg.append(' data: [%s, ntotal, %i] where %i=[%s]\n' % (ntimes_word, n, n, str(', '.join(headers))))
        msg.append(' element_node.shape = %s\n' % str(self.element_node.shape).replace('L', ''))
        msg.append(' data.shape = %s\n' % str(self.data.shape).replace('L', ''))
        msg.append(' element type: %s\n' % self.element_name)
        msg += self.get_data_code()
        return msg
    def get_element_index(self, eids):
        # elements are always sorted; nodes are not
        itot = searchsorted(eids, self.element)  #[0]
        return itot
    def eid_to_element_node_index(self, eids):
        # NOTE(review): searchsorted is called with a single boolean-mask
        # argument here, which is not the two-argument (sorted, values) form
        # -- looks suspicious; compare with sibling OES containers.
        ind = ravel([searchsorted(self.element == eid) for eid in eids])
        return ind
    def write_f06(self, f06_file, header=None, page_stamp='PAGE %s',
                  page_num=1, is_mag_phase=False, is_sort1=True):
        """Writes the f06-formatted pages for every time step in the table."""
        if header is None:
            header = []
        msg = self._get_msgs()
        (ntimes, unused_ntotal) = self.data.shape[:2]
        eids = self.element_node[:, 0]
        nids = self.element_node[:, 1]
        for itime in range(ntimes):
            dt = self._times[itime]
            header = _eigenvalue_header(self, header, itime, ntimes, dt)
            f06_file.write(''.join(header + msg))
            #[radial, azimuthal, axial, shear, omax, oms, ovm]
            radial = self.data[itime, :, 0]
            azimuthal = self.data[itime, :, 1]
            axial = self.data[itime, :, 2]
            shear = self.data[itime, :, 3]
            omax = self.data[itime, :, 4]
            oms = self.data[itime, :, 5]
            ovm = self.data[itime, :, 6]
            for (i, eid, nid, radiali, azimuthali, axiali, sheari, omaxi, omsi, ovmi) in zip(
                    count(), eids, nids, radial, azimuthal, axial, shear, omax, oms, ovm):
                vals = [radiali, azimuthali, axiali, sheari, omaxi, omsi, ovmi]
                vals2 = write_floats_13e(vals)
                [radiali, azimuthali, axiali, sheari, omaxi, omsi, ovmi] = vals2
                f06_file.write(
                    '0%8i %-13s %-13s %-13s %-13s %-13s %-13s %-13s %s\n'
                    % (eid, nid, radiali, azimuthali, axiali, sheari, omaxi, omsi, ovmi))
            f06_file.write(page_stamp % page_num)
            page_num += 1
        if self.nonlinear_factor is None:
            page_num -= 1
        return page_num
class RealTriaxStressArray(RealTriaxArray, StressObject):
    """Stress flavor of the CTRIAX6 results table."""
    def __init__(self, data_code, is_sort1, isubcase, dt):
        RealTriaxArray.__init__(self, data_code, is_sort1, isubcase, dt)
        StressObject.__init__(self, data_code, isubcase)
    def get_headers(self):
        # column labels matching self.data's last axis
        headers = ['radial', 'azimuthal', 'axial', 'shear', 'omax', 'oms', 'ovm']
        return headers
    def _get_msgs(self):
        """Returns the f06 page header lines for the stress table."""
        if self.element_type == 53:
            pass
        else:
            raise NotImplementedError(self.element_type)
        msg = [
            ' S T R E S S E S I N T R I A X 6 E L E M E N T S\n',
            ' ELEMENT GRID ID STRESSES IN MATERIAL COORD SYSTEM MAX MAG MAX VON MISES \n',
            ' ID RADIAL AZIMUTHAL AXIAL SHEAR PRINCIPAL SHEAR\n',
            #' 5351 0 -9.726205E+02 -1.678908E+03 -1.452340E+03 -1.325111E+02 -1.678908E+03 3.702285E+02 6.654553E+02
            #' 4389 -9.867789E+02 -1.624276E+03 -1.388424E+03 -9.212539E+01 -1.624276E+03 3.288099E+02 5.806334E+02
        ]
        return msg
class RealTriaxStrainArray(RealTriaxArray, StrainObject):
    """Strain flavor of the CTRIAX6 results table."""
    def __init__(self, data_code, is_sort1, isubcase, dt):
        RealTriaxArray.__init__(self, data_code, is_sort1, isubcase, dt)
        StrainObject.__init__(self, data_code, isubcase)
    def get_headers(self):
        # column labels matching self.data's last axis
        headers = ['radial', 'azimuthal', 'axial', 'shear', 'omax', 'oms', 'ovm']
        return headers
    def _get_msgs(self):
        """Returns the f06 page header lines for the strain table."""
        if self.element_type == 53:
            pass
        else:
            raise NotImplementedError(self.element_type)
        msg = [
            ' S T R A I N S I N T R I A X 6 E L E M E N T S\n',
            ' ELEMENT GRID ID STRAINS IN MATERIAL COORD SYSTEM MAX MAG MAX VON MISES \n',
            ' ID RADIAL AZIMUTHAL AXIAL SHEAR PRINCIPAL SHEAR\n',
            #' 5351 0 -9.726205E+02 -1.678908E+03 -1.452340E+03 -1.325111E+02 -1.678908E+03 3.702285E+02 6.654553E+02
            #' 4389 -9.867789E+02 -1.624276E+03 -1.388424E+03 -9.212539E+01 -1.624276E+03 3.288099E+02 5.806334E+02
        ]
        return msg
| 44.121324 | 135 | 0.558787 |
10bd12ba41f5834c3e1092ad668a00a69acf4f56 | 1,022 | py | Python | migrations/0082_auto_20200324_1646.py | audaciouscode/PassiveDataKit-Django | ed1e00c436801b9f49a3e0e6657c2adb6b2ba3d4 | [
"Apache-2.0"
] | 5 | 2016-01-26T19:19:44.000Z | 2018-12-12T18:04:04.000Z | migrations/0082_auto_20200324_1646.py | audacious-software/PassiveDataKit-Django | da91a375c075ceec938f2c9bb6b011f9f019b024 | [
"Apache-2.0"
] | 6 | 2020-02-17T20:16:28.000Z | 2021-12-13T21:51:20.000Z | migrations/0082_auto_20200324_1646.py | audacious-software/PassiveDataKit-Django | da91a375c075ceec938f2c9bb6b011f9f019b024 | [
"Apache-2.0"
] | 4 | 2020-01-29T15:36:58.000Z | 2021-06-01T18:55:26.000Z | # pylint: skip-file
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-03-24 20:46
from django.db import migrations, models
class Migration(migrations.Migration):
    # Widens DataPoint.generator and DataPoint.source to 1024 chars and
    # rebuilds the composite indexes used by data-point queries.
    dependencies = [
        ('passive_data_kit', '0081_auto_20200227_0918'),
    ]
    operations = [
        migrations.AlterField(
            model_name='datapoint',
            name='generator',
            field=models.CharField(max_length=1024),
        ),
        migrations.AlterField(
            model_name='datapoint',
            name='source',
            field=models.CharField(max_length=1024),
        ),
        migrations.AlterIndexTogether(
            name='datapoint',
            index_together=set([('generator_definition', 'source_reference'), ('generator_definition', 'source_reference', 'created', 'recorded'), ('generator_definition', 'created'), ('generator_definition', 'source_reference', 'recorded'), ('generator_definition', 'source_reference', 'created'), ('source_reference', 'created')]),
        ),
    ]
| 32.967742 | 333 | 0.625245 |
1c9a8d41ba6271426bd641dd487e7519676c71d9 | 344 | py | Python | zoo/public/hrnet/hrnet_w18.py | megvii-research/basecls | 6b395a0a888370b4523764afb78a5a7634a3f6cd | [
"Apache-2.0"
] | 23 | 2021-12-08T02:35:01.000Z | 2022-03-16T02:23:19.000Z | zoo/public/hrnet/hrnet_w18.py | megvii-research/basecls | 6b395a0a888370b4523764afb78a5a7634a3f6cd | [
"Apache-2.0"
] | 4 | 2021-12-23T11:31:17.000Z | 2022-02-28T01:35:31.000Z | zoo/public/hrnet/hrnet_w18.py | megvii-research/basecls | 6b395a0a888370b4523764afb78a5a7634a3f6cd | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
from basecls.configs import HRNetConfig
# Overrides applied on top of the base HRNet configuration.
_cfg = dict(
    model=dict(
        name="hrnet_w18",  # selects the HRNet-W18 variant
    ),
)
class Cfg(HRNetConfig):
    """HRNet-W18 training configuration."""
    def __init__(self, values_or_file=None, **kwargs):
        """Apply the local overrides, then merge user-supplied values/files."""
        super().__init__(_cfg)
        self.merge(values_or_file, **kwargs)
| 21.5 | 58 | 0.665698 |
b6981970a55f1f8f442b1f3b0a3b108c073bbf13 | 9,589 | py | Python | pureport_client/commands/accounts/__init__.py | ellievaughn/pureport-python-client | e0c80c7200549723820169da3d137dd771be3f11 | [
"MIT"
] | null | null | null | pureport_client/commands/accounts/__init__.py | ellievaughn/pureport-python-client | e0c80c7200549723820169da3d137dd771be3f11 | [
"MIT"
] | null | null | null | pureport_client/commands/accounts/__init__.py | ellievaughn/pureport-python-client | e0c80c7200549723820169da3d137dd771be3f11 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*_
#
# Copyright (c) 2020, Pureport, Inc.
# All Rights Reserved
from __future__ import absolute_import
from click import (
option,
argument
)
from pureport_client.util import JSON
from pureport_client.commands import CommandBase
class Command(CommandBase):
"""Manage Pureport accounts
"""
@option('-i', '--ids', multiple=True, help='Find a particular set of accounts by their ids.')
@option('-p', '--parent_id', help='Find all children accounts of a single parent account.')
@option('-n', '--name', help='Search for accounts by their name.')
@option('-l', '--limit', type=int, help='Limit the number of results.')
def list(self, ids=None, parent_id=None, name=None, limit=None):
"""Get a list of all accounts.
\f
:param ids: a list of account ids to find
:type ids: list
:param parent_id: a parent acocunt id
:type parent_id: str
:param name: a name for lowercase inter-word checking
:type name: str
:param limit: the max number of entrie to return
:type limit: int
:returns: a liist of accounts
:rtype: list
"""
query = {'ids': ids, 'parentId': parent_id, 'name': name, 'limit': limit}
kwargs = {'query': dict(((k, v) for k, v in query.items() if v))}
return self.client.find_all_accounts(**kwargs)
@argument('account_id')
def get(self, account_id):
"""Get an account by its id.
\f
:param str account_id: the account id
:rtype: Account
:raises: .exception.ClientHttpError
"""
self.__call__('get', '/accounts/{}'.format(account_id))
@argument('account', type=JSON)
def create(self, account):
"""Create a new Pureport account
\f
:param account: Account object to be created
:type account: dict
:returns: the created Account object
:rtype: dict
"""
return self.__call__('post', '/accounts', json=account)
@argument('account', type=JSON)
def update(self, account):
"""Update an existing account.
\f
:param account: the Account object
:type account: dict
:returns: an updated Account object
:rtype: dict
"""
return self.__call__('put', '/accounts/{id}'.format(**account), json=account)
@argument('account_id')
def delete(self, account_id):
"""Delete an existing account.
\f
:param account_id: the id of the account to delete
:type account_id: str
:returns: None
"""
self.__call__('delete', '/accounts/{}'.format(account_id))
    # ------------------------------------------------------------------
    # Sub-command factories: each returns a child Command bound to the
    # given account id.  Imports are deferred into the method bodies to
    # keep CLI start-up cheap and avoid circular imports.
    # ------------------------------------------------------------------
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def api_keys(self, account_id):
        """Manage Pureport account API keys
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.api_keys import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def audit_log(self, account_id):
        """Manage Pureport account audit logs
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.audit_log import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def billing(self, account_id):
        """Manage Pureport account billing details
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.billing import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def connections(self, account_id):
        """Manage Pureport account connections
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.connections import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def consent(self, account_id):
        """Manage Pureport account consent agreements
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.consent import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def invites(self, account_id):
        """Manage Pureport account invitations
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.invites import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def invoices(self, account_id):
        """Manage Pureport account invoices
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.invoices import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def members(self, account_id):
        """Manage Pureport account members
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.members import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def metrics(self, account_id):
        """Manage Pureport account metrics
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.metrics import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def networks(self, account_id):
        """Manage Pureport account networks
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.networks import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def permissions(self, account_id):
        """Manage Pureport account permissions
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.permissions import Command
        return Command(self.client, account_id)
    @option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
    def ports(self, account_id):
        """Manage Pureport account ports
        \f
        :param account_id: the id of the account to manage
        :type account_id: str

        :returns: a command instance
        :rtype: Command
        """
        from pureport_client.commands.accounts.ports import Command
        return Command(self.client, account_id)
@option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
def roles(self, account_id):
"""Manage Pureport account roles
\f
:param account_id: the id of the account to manage
:type account_id: str
:returns: a command instance
:rtype: Command
"""
from pureport_client.commands.accounts.roles import Command
return Command(self.client, account_id)
@option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
def supported_connections(self, account_id):
"""Manage Pureport account supported connections
\f
:param account_id: the id of the account to manage
:type account_id: str
:returns: a command instance
:rtype: Command
"""
from pureport_client.commands.accounts.supported_connections import Command
return Command(self.client, account_id)
@option('-a', '--account_id', envvar='PUREPORT_ACCOUNT_ID', required=True)
def supported_ports(self, account_id):
"""Manage Pureport account supported ports
\f
:param account_id: the id of the account to manage
:type account_id: str
:returns: a command instance
:rtype: Command
"""
from pureport_client.commands.accounts.supported_ports import Command
return Command(self.client, account_id)
| 31.336601 | 97 | 0.630201 |
adb319cfa7747d674b6a4e109d081578ddc9f62e | 4,302 | py | Python | positions/examples/store/tests.py | Elec/django-positions | 1ee904ccfc82679d630cbab4358c5d6cc763f6df | [
"BSD-3-Clause"
] | null | null | null | positions/examples/store/tests.py | Elec/django-positions | 1ee904ccfc82679d630cbab4358c5d6cc763f6df | [
"BSD-3-Clause"
] | 1 | 2020-09-03T06:06:23.000Z | 2020-09-03T06:06:23.000Z | positions/examples/store/tests.py | elec/django-positions | 1ee904ccfc82679d630cbab4358c5d6cc763f6df | [
"BSD-3-Clause"
] | null | null | null | import doctest
import unittest
from django.db import models
from positions import PositionField
from positions.examples.store.models import Product, Category, ProductCategory
from django.test import TestCase
class StoreTestCase(TestCase):
    """Exercises position bookkeeping for products shared across categories."""

    def setUp(self):
        pass

    def tearDown(self):
        # Wipe everything each test created so cases stay isolated.
        Category.objects.all().delete()
        Product.objects.all().delete()
        ProductCategory.objects.all().delete()

    def _ordering(self, category):
        """Return [(product name, position), ...] for *category*, sorted by position."""
        qs = ProductCategory.objects.filter(category=category)
        return list(qs.values_list('product__name', 'position').order_by('position'))

    # @unittest.skip("Some reason. If you are reading this in a test run someone did not fill this in.")
    def test_doctests_standin(self):
        # Ported from the module's former doctests; splitting them into
        # focused tests is still pending.
        clothes = Category.objects.create(name="Clothes")
        sporting_goods = Category.objects.create(name="Sporting Goods")
        bat = Product.objects.create(name="Bat")
        ProductCategory.objects.create(product=bat, category=sporting_goods)
        cap = Product.objects.create(name="Cap")
        cap_in_sporting_goods = ProductCategory.objects.create(product=cap, category=sporting_goods)
        ProductCategory.objects.create(product=cap, category=clothes)
        glove = Product.objects.create(name="Glove")
        ProductCategory.objects.create(product=glove, category=sporting_goods)
        tshirt = Product.objects.create(name="T-shirt")
        ProductCategory.objects.create(product=tshirt, category=clothes)
        jeans = Product.objects.create(name="Jeans")
        ProductCategory.objects.create(product=jeans, category=clothes)
        jersey = Product.objects.create(name="Jersey")
        ProductCategory.objects.create(product=jersey, category=sporting_goods)
        ProductCategory.objects.create(product=jersey, category=clothes)
        ball = Product.objects.create(name="Ball")
        ProductCategory.objects.create(product=ball, category=sporting_goods)
        self.assertEqual(
            self._ordering(clothes),
            [(u'Cap', 0), (u'T-shirt', 1), (u'Jeans', 2), (u'Jersey', 3)])
        self.assertEqual(
            self._ordering(sporting_goods),
            [(u'Bat', 0), (u'Cap', 1), (u'Glove', 2), (u'Jersey', 3), (u'Ball', 4)])
        # Moving cap in sporting goods shouldn't effect its position in clothes.
        cap_in_sporting_goods.position = -1
        cap_in_sporting_goods.save()
        self.assertEqual(
            self._ordering(clothes),
            [(u'Cap', 0), (u'T-shirt', 1), (u'Jeans', 2), (u'Jersey', 3)])
        self.assertEqual(
            self._ordering(sporting_goods),
            [(u'Bat', 0), (u'Glove', 1), (u'Jersey', 2), (u'Ball', 3), (u'Cap', 4)])
        # Deleting an object should reorder both collections.
        cap.delete()
        self.assertEqual(
            self._ordering(clothes),
            [(u'T-shirt', 0), (u'Jeans', 1), (u'Jersey', 2)])
        self.assertEqual(
            self._ordering(sporting_goods),
            [(u'Bat', 0), (u'Glove', 1), (u'Jersey', 2), (u'Ball', 3)])
| 53.111111 | 151 | 0.709205 |
a13f11ff7935da340ca27e899d494890144c5d54 | 282 | py | Python | eggs/numpy-1.9.1-py2.7-linux-x86_64.egg/numpy/random/mtrand.py | kruthikarshankar/bemoss_os | 460a5a41b38240bb9f6dacc23d373ae1942259a8 | [
"Unlicense"
] | 3 | 2018-11-25T01:09:55.000Z | 2021-08-24T01:56:36.000Z | eggs/numpy-1.9.1-py2.7-linux-x86_64.egg/numpy/random/mtrand.py | kwarodom/bemoss_os_1.2 | 460a5a41b38240bb9f6dacc23d373ae1942259a8 | [
"Unlicense"
] | null | null | null | eggs/numpy-1.9.1-py2.7-linux-x86_64.egg/numpy/random/mtrand.py | kwarodom/bemoss_os_1.2 | 460a5a41b38240bb9f6dacc23d373ae1942259a8 | [
"Unlicense"
] | 3 | 2018-11-09T03:38:09.000Z | 2020-02-24T06:26:10.000Z | def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, 'mtrand.so')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
| 35.25 | 69 | 0.765957 |
89dec2eb063a651d794528257ed30772ab9dfa18 | 392 | py | Python | users/forms.py | Rohan-cod/sch_webscr | ef954374173ac2fdb9ec067fe9b4571edfde8f2a | [
"MIT"
] | 2 | 2020-10-05T00:40:13.000Z | 2021-04-29T07:11:19.000Z | users/forms.py | Rohan-cod/COVID19_TRACKER_AND_VOICE_ASSISTANT | d1203f204a51bc93d8bb5eb8a8c722a46c54815f | [
"MIT"
] | 10 | 2020-05-21T10:30:51.000Z | 2021-04-08T21:52:42.000Z | users/forms.py | Rohan-cod/sch_webscr | ef954374173ac2fdb9ec067fe9b4571edfde8f2a | [
"MIT"
] | 4 | 2020-12-04T06:42:13.000Z | 2021-01-03T19:33:31.000Z | from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import CustomUser
class CustomUserCreationForm(UserCreationForm):
    """Sign-up form for the project's custom user model.

    Exposes username, email and the custom ``age`` field on top of the
    password handling inherited from :class:`UserCreationForm`.
    """

    class Meta(UserCreationForm.Meta):
        # Fix: Meta must inherit from UserCreationForm.Meta (the options
        # holder), not from the form class itself, per the Django docs on
        # extending UserCreationForm for a custom user model.
        model = CustomUser
        fields = ('username', 'email', 'age',)
class CustomUserChangeForm(UserChangeForm):
    """Form for editing an existing CustomUser (admin / profile pages)."""

    class Meta:
        model = CustomUser
        fields = ('username', 'email', 'age',)
e49c0b45bcbf7dcad69f5425e4b95676fa5cf594 | 10,037 | py | Python | security_monkey/auth/modules.py | stackArmor/security_monkey | 1966ea47313d3ca69f3088bb9697d5cff0ffdf64 | [
"Apache-2.0"
] | null | null | null | security_monkey/auth/modules.py | stackArmor/security_monkey | 1966ea47313d3ca69f3088bb9697d5cff0ffdf64 | [
"Apache-2.0"
] | 2 | 2018-03-12T17:21:33.000Z | 2020-09-03T15:56:11.000Z | security_monkey/auth/modules.py | stackArmor/security_monkey | 1966ea47313d3ca69f3088bb9697d5cff0ffdf64 | [
"Apache-2.0"
] | 2 | 2018-06-15T16:55:11.000Z | 2020-04-30T16:26:59.000Z | import itertools
from flask import request, abort, _app_ctx_stack, redirect
from flask_security.core import AnonymousUser
from security_monkey.datastore import User
try:
from flask.ext.login import current_user
except ImportError:
current_user = None
from .models import RBACRole, RBACUserMixin
from . import anonymous
from flask import Response
import json
class AccessControlList(object):
    """Holds the allow rules and exemptions used for access-control checks."""
    def __init__(self):
        # Allow rules, each stored as a (role_name, method, resource) triple.
        self._allowed = []
        # View-function names excluded from permission checking entirely.
        self._exempt = []
        self.seted = False
    def allow(self, role, method, resource, with_children=True):
        """Add an allowing rule.
        :param role: Role object this rule applies to.
        :param method: HTTP method to allow (GET, POST, PUT, ...).
        :param resource: Resource, i.e. the view function name.
        :param with_children: Also grant the role's children when `True`.
        """
        grantees = list(role.get_children()) if with_children else []
        grantees.append(role)
        for grantee in grantees:
            rule = (grantee.name, method, resource)
            if rule not in self._allowed:
                self._allowed.append(rule)
    def exempt(self, view_func):
        """Exempt a view function from permission checking.
        :param view_func: Name of the view function to exempt.
        """
        if view_func not in self._exempt:
            self._exempt.append(view_func)
    def is_allowed(self, role, method, resource):
        """Return whether *role* may use *method* on *resource*."""
        return (role, method, resource) in self._allowed
    def is_exempt(self, view_func):
        """Return whether *view_func* is exempted from checks."""
        return view_func in self._exempt
class _RBACState(object):
"""Records configuration for Flask-RBAC"""
def __init__(self, rbac, app):
self.rbac = rbac
self.app = app
class RBAC(object):
    """
    This class implements a role-based access control module in Flask.
    Initialize Flask-RBAC by passing the application::
        app = Flask(__name__)
        rbac = RBAC(app)
    :param app: the Flask object
    """
    # Models used to resolve role names (in _setup_acl) and to validate the
    # concrete type of the current user during authentication.
    _role_model = RBACRole
    _user_model = RBACUserMixin
    def __init__(self, app):
        # `before_acl` buffers (role_name, method, view_name, with_children)
        # tuples registered via the `allow` decorator; they are flushed into
        # the ACL lazily by `_setup_acl`.
        self.acl = AccessControlList()
        self.before_acl = []
        self.app = app
        self.init_app(app)
    def init_app(self, app):
        """Register this extension and its request hooks on *app*."""
        # Add (RBAC, app) to flask extensions.
        # Add hook to authenticate permission before request.
        if not hasattr(app, 'extensions'):
            app.extensions = {}
        app.extensions['rbac'] = _RBACState(self, app)
        # Static assets are always readable by anonymous users.
        self.acl.allow(anonymous, 'GET', app.view_functions['static'].__name__)
        app.before_first_request(self._setup_acl)
        app.before_request(self._authenticate)
    def has_permission(self, method, endpoint, user=None):
        """Return whether the current user can access the resource.
        Example::
            @app.route('/some_url', methods=['GET', 'POST'])
            @rbac.allow(['anonymous'], ['GET'])
            def a_view_func():
                return Response('Blah Blah...')
        If you are not logged in:
        `rbac.has_permission('GET', 'a_view_func')` returns True.
        `rbac.has_permission('POST', 'a_view_func')` returns False.
        :param method: The HTTP method to check.
        :param endpoint: The application endpoint.
        :param user: user who you need to check. Current user by default.
        """
        app = self.get_app()
        _user = user or current_user
        roles = _user.get_roles()
        view_func = app.view_functions[endpoint]
        return self._check_permission(roles, method, view_func)
    def check_perm(self, role, method, callback=None):
        """Decorator: run a permission check for *role*/*method* at the time
        the decorator is applied to the view; on failure invoke *callback*
        if callable, otherwise the default deny hook."""
        def decorator(view_func):
            if not self._check_permission([role], method, view_func):
                if callable(callback):
                    callback()
                else:
                    self._deny_hook()
            return view_func
        return decorator
    def allow(self, roles, methods, with_children=True):
        """Decorator: allow roles to access the view func with it.
        :param roles: List, each name of roles. Please note that,
                      `anonymous` is referred to anonymous.
                      If you add `anonymous` to the rule,
                      everyone can access the resource,
                      unless you deny other roles.
        :param methods: List, each name of methods.
                        methods is valid in ['GET', 'POST', 'PUT', 'DELETE']
        :param with_children: Whether to allow children of roles as well.
                              True by default.
        """
        def decorator(view_func):
            _methods = [m.upper() for m in methods]
            # Registration is deferred: rules are buffered here and applied
            # to the ACL later by `_setup_acl`.
            for r, m, v in itertools.product(roles, _methods, [view_func.__name__]):
                self.before_acl.append((r, m, v, with_children))
            return view_func
        return decorator
    def exempt(self, view_func):
        """
        Decorator function.
        Exempt a view function from being checked for permission.
        """
        self.acl.exempt(view_func.__name__)
        return view_func
    def get_app(self, reference_app=None):
        """
        Helper to look up an app: explicit argument first, then the app this
        extension was initialized with, finally the current app context.
        """
        if reference_app is not None:
            return reference_app
        if self.app is not None:
            return self.app
        ctx = _app_ctx_stack.top
        if ctx is not None:
            return ctx.app
        raise RuntimeError('application not registered on rbac '
                           'instance and no application bound '
                           'to current context')
    def _authenticate(self):
        """before_request hook: enforce the ACL for the incoming request."""
        app = self.get_app()
        assert app, "Please initialize your application into Flask-RBAC."
        assert self._role_model, "Please set role model before authenticate."
        assert self._user_model, "Please set user model before authenticate."
        user = current_user
        if not isinstance(user._get_current_object(), self._user_model) and not isinstance(user._get_current_object(), AnonymousUser):
            raise TypeError(
                "%s is not an instance of %s" %
                (user, self._user_model.__class__))
        endpoint = request.endpoint
        resource = app.view_functions.get(endpoint, None)
        if not resource:
            abort(404)
        method = request.method
        # Users without a get_roles() implementation are treated as anonymous.
        if not hasattr(user, 'get_roles'):
            roles = [anonymous]
        else:
            roles = user.get_roles()
        permit = self._check_permission(roles, method, resource)
        if not permit:
            return self._deny_hook(resource=resource)
        if hasattr(user,'id') and 'change' not in endpoint: # If user is authenticated
            # Redirect user to change password page, if the password has expired.
            # Password Expires in 60 Days
            return self._check_password_expiry()
    def _check_password_expiry(self):
        """Return a 401 JSON response demanding a password change when the
        user's most recent password is more than 60 days old; otherwise None."""
        from security_monkey.datastore import UserPasswordHistory
        import datetime
        # Most recent password-change record for the current user.
        pw_recs = UserPasswordHistory.query.filter(UserPasswordHistory.user_id == current_user.id).order_by(
            UserPasswordHistory.changed_at.desc()).limit(1).all()
        if len(pw_recs) > 0:
            rec = pw_recs[0]
            pw_lastchange_dt = rec.changed_at
            pw_expirydate = pw_lastchange_dt + datetime.timedelta(days=60)
            if not pw_expirydate > datetime.datetime.utcnow():
                status = 401
                # abort(status)
                auth_dict = {
                    "authenticated": True,
                    "change_password": True
                }
                return Response(response=json.dumps({"auth": auth_dict}), status=status, mimetype="application/json")
        return
    def _check_permission(self, roles, method, resource):
        """Return True when any of *roles* (or anonymous) may call *method*
        on the view function *resource*."""
        resource = resource.__name__
        if self.acl.is_exempt(resource):
            return True
        # Apply any buffered `allow` rules on first use.
        if not self.acl.seted:
            self._setup_acl()
        _roles = set()
        # '*' and a None resource act as wildcards in the rule lookup.
        _methods = {'*', method}
        _resources = {None, resource}
        _roles.add(anonymous)
        _roles.update(roles)
        for r, m, res in itertools.product(_roles, _methods, _resources):
            if self.acl.is_allowed(r.name, m, res):
                return True
        return False
    def _deny_hook(self, resource=None):
        """Build the JSON response returned when access is denied:
        403 for authenticated users, 401 (with a login URL) otherwise."""
        app = self.get_app()
        if current_user.is_authenticated:
            status = 403
        else:
            status = 401
        #abort(status)
        # The advertised login URL depends on whether nginx fronts the API.
        if app.config.get('FRONTED_BY_NGINX'):
            url = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
        else:
            url = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
        if current_user.is_authenticated:
            auth_dict = {
                "authenticated": True,
                "user": current_user.email,
                "roles": current_user.role,
            }
        else:
            auth_dict = {
                "authenticated": False,
                "user": None,
                "url": url
            }
        return Response(response=json.dumps({"auth": auth_dict}), status=status, mimetype="application/json")
    def _setup_acl(self):
        """Flush the rules buffered in `before_acl` into the ACL."""
        for rn, method, resource, with_children in self.before_acl:
            role = self._role_model.get_by_name(rn)
            self.acl.allow(role, method, resource, with_children)
        self.acl.seted = True
| 33.345515 | 134 | 0.591511 |
95c45136f204b517d76398208ffdeff0b403fa70 | 7,339 | py | Python | src/poetry/console/commands/plugin/add.py | danieleades/poetry | 9957f6faa3c311533456104bbf35aadce420c32f | [
"MIT"
] | 1 | 2020-02-01T07:13:05.000Z | 2020-02-01T07:13:05.000Z | src/poetry/console/commands/plugin/add.py | sthagen/poetry | 3e52c7febeef8bf60ea07690a890e547cf3bdd1b | [
"MIT"
] | 1 | 2021-12-31T19:44:26.000Z | 2022-03-08T20:52:13.000Z | src/poetry/console/commands/plugin/add.py | Anselmoo/poetry | f6022eade7485a3b017ef0c8060dffed12e3cdb2 | [
"MIT"
] | 2 | 2020-12-07T04:26:21.000Z | 2021-09-25T21:46:36.000Z | import os
from typing import Dict
from typing import List
from typing import cast
from cleo.helpers import argument
from cleo.helpers import option
from poetry.console.application import Application
from poetry.console.commands.init import InitCommand
from poetry.console.commands.update import UpdateCommand
class PluginAddCommand(InitCommand):
    """CLI command that installs Poetry plugins into Poetry's own environment."""
    name = "plugin add"
    description = "Adds new plugins."
    arguments = [
        argument("plugins", "The names of the plugins to install.", multiple=True),
    ]
    options = [
        option(
            "dry-run",
            None,
            "Output the operations but do not execute anything (implicitly enables --verbose).",
        )
    ]
    help = """
The <c1>plugin add</c1> command installs Poetry plugins globally.
It works similarly to the <c1>add</c1> command:
If you do not specify a version constraint, poetry will choose a suitable one based on the available package versions.
You can specify a package in the following forms:
  - A single name (<b>requests</b>)
  - A name and a constraint (<b>requests@^2.23.0</b>)
  - A git url (<b>git+https://github.com/python-poetry/poetry.git</b>)
  - A git url with a revision (<b>git+https://github.com/python-poetry/poetry.git#develop</b>)
  - A git SSH url (<b>git+ssh://github.com/python-poetry/poetry.git</b>)
  - A git SSH url with a revision (<b>git+ssh://github.com/python-poetry/poetry.git#develop</b>)
  - A file path (<b>../my-package/my-package.whl</b>)
  - A directory (<b>../my-package/</b>)
  - A url (<b>https://example.com/packages/my-package-0.1.0.tar.gz</b>)\
"""
    def handle(self) -> int:
        """Resolve the requested plugins, record them in the system env's
        pyproject.toml and delegate installation to the `update` command.

        :returns: exit status of the delegated update command, or 0 when
            every requested plugin is already installed.
        """
        from pathlib import Path
        import tomlkit
        from cleo.io.inputs.string_input import StringInput
        from cleo.io.io import IO
        from poetry.core.pyproject.toml import PyProjectTOML
        from poetry.core.semver.helpers import parse_constraint
        from poetry.factory import Factory
        from poetry.packages.project_package import ProjectPackage
        from poetry.repositories.installed_repository import InstalledRepository
        from poetry.utils.env import EnvManager
        plugins = self.argument("plugins")
        # Plugins should be installed in the system env to be globally available
        system_env = EnvManager.get_system_env(naive=True)
        # POETRY_HOME (if set) overrides where the plugin pyproject lives.
        env_dir = Path(
            os.getenv("POETRY_HOME") if os.getenv("POETRY_HOME") else system_env.path
        )
        # We check for the plugins existence first.
        if env_dir.joinpath("pyproject.toml").exists():
            pyproject = tomlkit.loads(
                env_dir.joinpath("pyproject.toml").read_text(encoding="utf-8")
            )
            poetry_content = pyproject["tool"]["poetry"]
            existing_packages = self.get_existing_packages_from_input(
                plugins, poetry_content, "dependencies"
            )
            if existing_packages:
                self.notify_about_existing_packages(existing_packages)
            plugins = [plugin for plugin in plugins if plugin not in existing_packages]
        if not plugins:
            return 0
        # Interactively/automatically turn bare names into full requirements.
        plugins = self._determine_requirements(plugins)
        # We retrieve the packages installed in the system environment.
        # We assume that this environment will be a self contained virtual environment
        # built by the official installer or by pipx.
        # If not, it might lead to side effects since other installed packages
        # might not be required by Poetry but still taken into account when resolving dependencies.
        installed_repository = InstalledRepository.load(
            system_env, with_dependencies=True
        )
        root_package = None
        # Model the installed Poetry itself as the root package so plugin
        # resolution respects Poetry's own dependencies.
        for package in installed_repository.packages:
            if package.name == "poetry":
                root_package = ProjectPackage(package.name, package.version)
                for dependency in package.requires:
                    root_package.add_dependency(dependency)
                break
        root_package.python_versions = ".".join(
            str(v) for v in system_env.version_info[:3]
        )
        # We create a `pyproject.toml` file based on all the information
        # we have about the current environment.
        if not env_dir.joinpath("pyproject.toml").exists():
            Factory.create_pyproject_from_package(root_package, env_dir)
        # We add the plugins to the dependencies section of the previously
        # created `pyproject.toml` file
        pyproject = PyProjectTOML(env_dir.joinpath("pyproject.toml"))
        poetry_content = pyproject.poetry_config
        poetry_dependency_section = poetry_content["dependencies"]
        plugin_names = []
        for plugin in plugins:
            if "version" in plugin:
                # Validate version constraint
                parse_constraint(plugin["version"])
            constraint = tomlkit.inline_table()
            for name, value in plugin.items():
                if name == "name":
                    continue
                constraint[name] = value
            # Collapse `{version = "..."}` to the plain version string.
            if len(constraint) == 1 and "version" in constraint:
                constraint = constraint["version"]
            poetry_dependency_section[plugin["name"]] = constraint
            plugin_names.append(plugin["name"])
        pyproject.save()
        # From this point forward, all the logic will be deferred to
        # the update command, by using the previously created `pyproject.toml`
        # file.
        application = cast(Application, self.application)
        update_command: UpdateCommand = cast(UpdateCommand, application.find("update"))
        # We won't go through the event dispatching done by the application
        # so we need to configure the command manually
        update_command.set_poetry(Factory().create_poetry(env_dir))
        update_command.set_env(system_env)
        application._configure_installer(update_command, self._io)
        argv = ["update"] + plugin_names
        if self.option("dry-run"):
            argv.append("--dry-run")
        return update_command.run(
            IO(
                StringInput(" ".join(argv)),
                self._io.output,
                self._io.error_output,
            )
        )
    def get_existing_packages_from_input(
        self, packages: List[str], poetry_content: Dict, target_section: str
    ) -> List[str]:
        """Return the names in *packages* already present (case-insensitively)
        in *target_section* of the given pyproject content."""
        existing_packages = []
        for name in packages:
            for key in poetry_content[target_section]:
                if key.lower() == name.lower():
                    existing_packages.append(name)
        return existing_packages
    def notify_about_existing_packages(self, existing_packages: List[str]) -> None:
        """Print which plugins are being skipped and how to update them."""
        self.line(
            "The following plugins are already present in the "
            "<c2>pyproject.toml</c2> file and will be skipped:\n"
        )
        for name in existing_packages:
            self.line(f"  • <c1>{name}</c1>")
        self.line(
            "\nIf you want to update it to the latest compatible version, "
            "you can use `<c2>poetry plugin update package</c2>`.\n"
            "If you prefer to upgrade it to the latest available version, "
            "you can use `<c2>poetry plugin add package@latest</c2>`.\n"
        )
| 37.065657 | 118 | 0.637553 |
63160fb449ff5a5eb845bb01618d147fbf6e5606 | 512 | py | Python | Proyecto/Prueba.py | luisgerardoperalestorres/EstanciasI | 2d9add096919d9911a0d40ad8bcabce4f6a24b40 | [
"MIT"
] | null | null | null | Proyecto/Prueba.py | luisgerardoperalestorres/EstanciasI | 2d9add096919d9911a0d40ad8bcabce4f6a24b40 | [
"MIT"
] | null | null | null | Proyecto/Prueba.py | luisgerardoperalestorres/EstanciasI | 2d9add096919d9911a0d40ad8bcabce4f6a24b40 | [
"MIT"
] | null | null | null | from EstanciasI import *
# Recipient address and body of the e-mail to be sent.
Hacia='estanciaupv@gmail.com'
Mensaje="Hola a todos y muchos Saludos"
# Calendar event details: name, location and description.
Nombre ='Vida salvaje'
Ubicacion ='Mexico'
Describcion ='Solo para gatos'
# Event start date/time components (year, month, day, hour, minute).
year =int('2019')
mes =int('09')
dia =int('30')
hora =int('12')
minu =int('00')
# Authenticate against the Google APIs, then send the mail, create the
# calendar event and list events.
# NOTE(review): these calls perform network I/O through the Google API —
# verify that credentials are configured before running this script.
Conexion=ServicioGoogle()
Servicio=Conexion.accesoGoogleApi()
Conexion.CrearMensaje(Hacia, Servicio, Conexion.accesoGmail(), Mensaje)
Conexion.CrearEvento(Nombre, Ubicacion, Describcion, year, mes, dia, hora, minu, Servicio)
Conexion.ConsultarEventos(Servicio)
| 26.947368 | 90 | 0.763672 |
e34ca6e40ff2af6fcae9277ae1f02509e7549797 | 2,122 | py | Python | consec_factors.py | marchcarax/Exercises_python | f63a9f214750c5327cad792bfdcd3813b4659718 | [
"MIT"
] | null | null | null | consec_factors.py | marchcarax/Exercises_python | f63a9f214750c5327cad792bfdcd3813b4659718 | [
"MIT"
] | null | null | null | consec_factors.py | marchcarax/Exercises_python | f63a9f214750c5327cad792bfdcd3813b4659718 | [
"MIT"
] | null | null | null | '''
Problem 47
The first two consecutive numbers to have two distinct prime factors are:
14 = 2 × 7
15 = 3 × 5
The first three consecutive numbers to have three distinct prime factors are:
644 = 2² × 7 × 23
645 = 3 × 5 × 43
646 = 2 × 17 × 19.
Find the first four consecutive integers to have four distinct prime factors each. What is the first of these numbers?
ITS SUPER SLOW LOL, but works
'''
import numpy as np
from math import sqrt
from functools import reduce
#great function found in stackoverflow by agf (rewritten without reduce)
def factors(n):
    """Return the set of all positive divisors of n, including 1 and n.

    Only odd candidates are tried when n is odd.  The original
    reduce(list.__add__, ...) built an intermediate list quadratically and
    raised TypeError on an empty divisor sequence (e.g. n == 0); adding the
    divisor pairs directly into a set avoids both problems.
    """
    step = 2 if n % 2 else 1
    divisors = set()
    for i in range(1, int(sqrt(n)) + 1, step):
        if n % i == 0:
            divisors.add(i)
            divisors.add(n // i)
    return divisors
def distinct_factors(l: set):
    """Screen a divisor set down to its "distinct" members.

    The set is converted to an int32 numpy array; an entry is zeroed when
    its square root is also present in the array, or when it has more than
    three divisors in total (i.e. it is neither 1, a prime, nor a prime
    square).  Zeroed slots are dropped before returning.

    NOTE(review): index 0 of the array is never examined (the loop starts
    at 1) and the array order comes from iterating a set — the caller
    (find_distinct_factors) appears to rely on this; verify before reuse.
    """
    arr = np.array(list(l)).astype(np.int32)
    for i in range(1, len(arr)):
        # Drop entries that are perfect squares of another entry.
        if np.isin(sqrt(arr[i]), arr):
            arr[i] = 0
        if arr[i] != 0:
            # Drop entries with more than three divisors (composite beyond p/p^2).
            if len(list(factors(arr[i]))) > 3:
                arr[i] = 0
    a = arr[arr != 0]
    return a
def find_distinct_factors():
    """Search for the first four consecutive integers whose screened factor
    sets each yield `number_factors` entries beyond the first.

    Checks i first, then i+3 before i+2 and i+1 so that a failure far ahead
    lets the search jump forward by more than one (the `j` step).  Returns
    the tuple (i, i+1, i+2, i+3) on success, or the bare sentinel 10000000
    when the search limit is reached.
    """
    i = 600 #we already know it has to be higher than 644
    number_factors = 4
    while True:
        j = 1
        fact = distinct_factors(factors(i))
        if (len(fact) - 1) == number_factors:
            fact_fut3 = distinct_factors(factors(i+3))
            if (len(fact_fut3) - 1) == number_factors:
                fact_fut2 = distinct_factors(factors(i+2))
                if (len(fact_fut2) - 1) == number_factors:
                    fact_fut1 = distinct_factors(factors(i+1))
                    if (len(fact_fut1) - 1) == number_factors:
                        return i, i+1, i+2, i+3
                    else:
                        j = 2 #if future i+1 fails, begin calcs in i+2
                else:
                    j = 3 #if future i+2 fails, begin calcs in i+3
            else:
                j = 4 #if future i+3 fails, begin calcs in i+4
        # Safety cap so the search cannot loop forever.
        if i == 10000000:
            return i
        i += j
def main():
    """Entry point: print the first qualifying run of consecutive integers."""
    answer = find_distinct_factors()
    print('factors of a: ', answer)


if __name__=='__main__':
    main()
c114a812ef28f762e19209f21393bee3c81b53cb | 4,721 | py | Python | ursina/prefabs/window_panel.py | bt530/ursina | 5bcc21a2f42c6ae4228bade995de7428b62b0d45 | [
"MIT"
] | 1 | 2021-07-26T17:22:51.000Z | 2021-07-26T17:22:51.000Z | ursina/prefabs/window_panel.py | bt530/ursina | 5bcc21a2f42c6ae4228bade995de7428b62b0d45 | [
"MIT"
] | 1 | 2020-12-22T16:59:04.000Z | 2020-12-22T16:59:04.000Z | ursina/prefabs/window_panel.py | bt530/ursina | 5bcc21a2f42c6ae4228bade995de7428b62b0d45 | [
"MIT"
] | null | null | null | from ursina import *
from ursina.prefabs.input_field import InputField
class Space():
    """Layout spacer: contributes *height* empty rows when a WindowPanel
    lays out its content (it is not an Entity, so nothing is drawn)."""
    def __init__(self, height=1):
        self.height = height
class WindowPanel(Draggable):
    """Draggable UI panel that stacks *content* items vertically.

    :param title: text shown on the panel's title bar.
    :param content: iterable (or dict, whose values are used) of UI elements
        laid out top to bottom; `Space` instances insert blank rows.
    Any extra keyword arguments are set as attributes before layout.
    """
    def __init__(self, title='', content=None, **kwargs):
        # Fix: the default was a mutable list literal (shared between calls);
        # a None sentinel keeps the behavior identical without that pitfall.
        if content is None:
            content = []
        super().__init__(origin=(-0,.5), scale=(.5, Text.size*2), color=color.black)
        self.content = content
        self.text = title
        self.popup = False
        self._prev_input_field = None
        self._original_scale = self.scale
        # Apply keyword overrides (e.g. popup=True) before laying out content.
        for key, value in kwargs.items():
            setattr(self, key, value)
        if self.text_entity:
            self.text_entity.world_scale_y = 1
        if content:
            spacing = .25
            height = 1 + spacing
            if isinstance(content, dict):
                content = content.values()
            # `height` tracks how far down the next element goes, in rows.
            for c in content:
                if isinstance(c, Space):
                    height += c.height
                if isinstance(c, Entity):
                    c.world_parent = self
                    c.y = -height
                    c.z = 0
                    # Chain input fields so focus can advance between them.
                    if isinstance(c, InputField):
                        if self._prev_input_field:
                            self._prev_input_field.next_field = c
                        self._prev_input_field = c
                    if isinstance(c, Text):
                        c.origin = (-.5, .5)
                        c.x = -.48
                        height += len(c.lines)
                    elif isinstance(c, Button):
                        c.world_parent = self
                        c.scale = (.9, 1)
                        if hasattr(c, 'height'):
                            c.scale_y = height
                        c.model = Quad(aspect=c.world_scale_x/c.world_scale_y)
                        height += c.scale_y
                        # c.y -= c.scale_y/2
                    elif isinstance(c, Slider):
                        c.world_parent = self
                        c.x = -.5 * .9
                        c.scale = (.9*2, 20)
                        height += c.scale_y
                    elif hasattr(c, 'scale_y'):
                        height += c.scale_y
                    if hasattr(c, 'text_entity') and c.text_entity is not None:
                        c.text_entity.world_scale = (1,1,1)
                height += spacing
            # Size the background panel to fit everything that was added.
            self.panel = Panel(parent=self, scale_y=height, model=Quad(), origin=(0, .5), z=.1, color=self.color.tint(.1))
            self.panel.model = Quad(aspect=self.panel.world_scale_x/self.panel.world_scale_y, radius=.025)
            self.panel.origin = (0, .5)
        if self.popup:
            # Popups can't be dragged and get a fullscreen click-blocker
            # behind them; clicking it closes the panel.
            self.lock_x = True
            self.lock_y = True
            self.bg = Button(
                parent=self,
                z=.1,
                scale=(999, 999),
                color=color.black66,
                highlight_color=color.black66,
                pressed_color=color.black66,
                )
            self.bg.on_click = self.close
    def on_enable(self):
        """Re-show the click-blocker (popups) and animate back to full size."""
        if self.popup:
            self.bg.enabled = True
        self.animate_scale(self._original_scale, duration=.1)
    def close(self):
        """Collapse the panel, then disable it once the animation finishes.
        NOTE(review): assumes `self.bg` exists, i.e. the panel was created
        as a popup — confirm before calling on non-popup panels."""
        self.bg.enabled = False
        self.animate_scale_y(0, duration=.1)
        invoke(setattr, self, 'enabled', False, delay=.2)
if __name__ == '__main__':
    '''
    WindowPanel is an easy way to create UI. It will automatically layout the content.
    '''
    app = Ursina()
    # Demo: a window with labeled input fields, a submit button and a slider,
    # all positioned automatically by WindowPanel.
    WindowPanel(
        title='Custom Window',
        content=(
            Text('Name:'),
            InputField(name='name_field'),
            Text('Age:'),
            InputField(name='age_field'),
            Text('Phone Number:'),
            InputField(name='phone_number_field'),
            # Space(height=1),
            # Text('Send:'),
            Button(text='Submit', color=color.azure),
            Slider()
            # ButtonGroup(('test', 'eslk', 'skffk'))
            ),
        # popup=True
        )
    # Text(dedent('''
    #     [ My Window                       [x]]
    #     | Create your character              |
    #     |                                    |
    #     | InputField                         |
    #     | [[Male][Female][Other]]            |
    #     | ButtonGroup(('male', 'female', 'other'))
    #     | Option1:  [t] Option2: []          |
    #     |                                    |
    #     |[    Submit    ]  [    Clear     ]  |
    #     '''[1:]), font='VeraMono.ttf', origin=(0,0)
    #     )
    # WindowPanel(title='My Window', (
    #     Text('Enter Name:'),
    #     InputField('name'),
    #     (
    #        Button(text='Submit', on_click='save_system.save(self.parent.input_field.value)'),
    #        Button(text='Clear', on_click='self.parent.input_field.value='')')
    #     )
    # ))
    app.run()
| 31.473333 | 122 | 0.467062 |
c87205378b8b59715674b5dfa4f53c39a81c0605 | 464 | py | Python | app/rabbitmq/callbacks.py | amanjaiswalofficial/infinity-reads-blog | a008845ca403c4bb07666ed9f9293fffe360bdd8 | [
"MIT"
] | null | null | null | app/rabbitmq/callbacks.py | amanjaiswalofficial/infinity-reads-blog | a008845ca403c4bb07666ed9f9293fffe360bdd8 | [
"MIT"
] | null | null | null | app/rabbitmq/callbacks.py | amanjaiswalofficial/infinity-reads-blog | a008845ca403c4bb07666ed9f9293fffe360bdd8 | [
"MIT"
] | 1 | 2020-12-05T19:09:01.000Z | 2020-12-05T19:09:01.000Z | from app import queue
@queue(queue='blog-queue', type='direct', exchange='blog-exchange', routing_key='blog-key')
def blog_queue(ch, method, props, body):
"""
declare the queue of direct exchange, flask-rabbitmq will bind automatically by key.
:param ch:
:param method:
:param props:
:param body:
:return:
"""
print("simple queue => {}".format(body))
ch.basic_ack(delivery_tag=method.delivery_tag) # for acknowledgement
| 27.294118 | 91 | 0.674569 |
3b899fb5395b330b2dd6e3417ee1abe93e598a1d | 95 | py | Python | docker/version.py | shoter/docker-py | bb148380e1ea92e26c8e4dc783dd18b8a39506c0 | [
"Apache-2.0"
] | null | null | null | docker/version.py | shoter/docker-py | bb148380e1ea92e26c8e4dc783dd18b8a39506c0 | [
"Apache-2.0"
] | null | null | null | docker/version.py | shoter/docker-py | bb148380e1ea92e26c8e4dc783dd18b8a39506c0 | [
"Apache-2.0"
] | 1 | 2019-12-19T12:50:02.000Z | 2019-12-19T12:50:02.000Z | version = "2.6.0-dev"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
| 31.666667 | 72 | 0.610526 |
a6ee8c732ee0a7c3cd53d9a30b66c519bc82a0ef | 4,541 | py | Python | robel/dclaw/screw.py | Del9fina/robel | 63dfac65932757134e5766f1e20a339efe281bc7 | [
"Apache-2.0"
] | 109 | 2019-08-29T22:55:41.000Z | 2022-03-19T18:26:37.000Z | robel/dclaw/screw.py | Del9fina/robel | 63dfac65932757134e5766f1e20a339efe281bc7 | [
"Apache-2.0"
] | 12 | 2019-11-14T05:16:00.000Z | 2021-02-21T07:49:32.000Z | robel/dclaw/screw.py | Del9fina/robel | 63dfac65932757134e5766f1e20a339efe281bc7 | [
"Apache-2.0"
] | 40 | 2019-09-29T06:50:44.000Z | 2022-03-19T18:34:20.000Z | # Copyright 2019 The ROBEL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Screw tasks with DClaw robots.
This is continuous rotation of an object to match a target velocity.
"""
from typing import Optional
import numpy as np
from robel.dclaw.turn import BaseDClawTurn
from robel.simulation.randomize import SimRandomizer
from robel.utils.configurable import configurable
class BaseDClawScrew(BaseDClawTurn):
    """Shared logic for DClaw screw tasks.

    Extends the turn task with a target position that advances at a constant
    angular velocity each step (continuous rotation).
    """
    def __init__(self, success_threshold: float = 0.2, **kwargs):
        """Initializes the environment.
        Args:
            success_threshold: The difference threshold (in radians) of the
                object position and the goal position within which we consider
                as a success.
        """
        super().__init__(success_threshold=success_threshold, **kwargs)
        # The target velocity is set during `_reset` (by subclasses).
        self._target_object_vel = 0
        # Unbounded (non-wrapped) target angle, integrated in `_step`.
        self._desired_target_pos = 0
    def _reset(self):
        super()._reset()
        # Start tracking from wherever the parent reset placed the target.
        self._desired_target_pos = self._target_object_pos
    def _step(self, action: np.ndarray):
        """Applies an action to the robot."""
        # Update the target object goal: advance it by vel * dt, unless an
        # interactive session is driving the target instead.
        if not self._interactive:
            self._desired_target_pos += self._target_object_vel * self.dt
            self._set_target_object_pos(
                self._desired_target_pos, unbounded=True)
        super()._step(action)
@configurable(pickleable=True)
class DClawScrewFixed(BaseDClawScrew):
    """Rotates the object with a fixed initial position and velocity."""

    def _reset(self):
        # Deterministic episode start: object and target both at angle 0,
        # target spinning at a constant 0.5 rad/s.
        self._set_target_object_pos(0)
        self._initial_object_pos = 0
        self._target_object_vel = 0.5
        super()._reset()
@configurable(pickleable=True)
class DClawScrewRandom(BaseDClawScrew):
    """Rotates the object with a random initial position and velocity."""

    def _reset(self):
        # Sample the start angle uniformly over +/- 180 degrees and make it
        # the initial target as well.
        start_angle = self.np_random.uniform(
            low=-np.pi, high=np.pi)
        self._initial_object_pos = start_angle
        self._set_target_object_pos(start_angle)
        # Sample the target angular velocity uniformly in [-0.75, 0.75).
        self._target_object_vel = self.np_random.uniform(low=-0.75, high=0.75)
        super()._reset()
@configurable(pickleable=True)
class DClawScrewRandomDynamics(DClawScrewRandom):
    """Rotates the object with a random initial position and velocity.
    The dynamics of the simulation are randomized each episode.
    """
    def __init__(self,
                 *args,
                 sim_observation_noise: Optional[float] = 0.05,
                 **kwargs):
        super().__init__(
            *args, sim_observation_noise=sim_observation_noise, **kwargs)
        # Helper that perturbs simulation parameters in-place on each reset.
        self._randomizer = SimRandomizer(self)
        # DoF velocity indices of both claw and object; these joints get
        # their damping/friction randomized in `_reset`.
        self._dof_indices = (
            self.robot.get_config('dclaw').qvel_indices.tolist() +
            self.robot.get_config('object').qvel_indices.tolist())
    def _reset(self):
        # Randomize joint dynamics.
        self._randomizer.randomize_dofs(
            self._dof_indices,
            damping_range=(0.005, 0.1),
            friction_loss_range=(0.001, 0.005),
        )
        # Randomize actuator gains (same value for all actuators).
        self._randomizer.randomize_actuators(
            all_same=True,
            kp_range=(1, 3),
        )
        # Randomize friction on all geoms in the scene.
        self._randomizer.randomize_geoms(
            all_same=True,
            friction_slide_range=(0.8, 1.2),
            friction_spin_range=(0.003, 0.007),
            friction_roll_range=(0.00005, 0.00015),
        )
        # Slightly perturb the mount position.
        self._randomizer.randomize_bodies(
            ['mount'],
            position_perturb_range=(-0.01, 0.01),
        )
        # Visual-domain randomization: recolor the mount and valve geoms.
        self._randomizer.randomize_geoms(
            ['mount'],
            color_range=(0.2, 0.9),
        )
        self._randomizer.randomize_geoms(
            parent_body_names=['valve'],
            color_range=(0.2, 0.9),
        )
        super()._reset()
| 33.389706 | 78 | 0.648756 |
ae1321144ffab7a5a6f0257155f7416ef4041160 | 363 | gyp | Python | Dependencies/gyp-master/test/same-rule-output-file-name/src/subdirs.gyp | knight666/exlibris | b21b46e0c84e5c4f81f8048022cda88e7bb3dca2 | [
"MIT"
] | null | null | null | Dependencies/gyp-master/test/same-rule-output-file-name/src/subdirs.gyp | knight666/exlibris | b21b46e0c84e5c4f81f8048022cda88e7bb3dca2 | [
"MIT"
] | null | null | null | Dependencies/gyp-master/test/same-rule-output-file-name/src/subdirs.gyp | knight666/exlibris | b21b46e0c84e5c4f81f8048022cda88e7bb3dca2 | [
"MIT"
] | null | null | null | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'subdirs',
'type': 'none',
'dependencies': [
'subdir1/subdir1.gyp:*',
'subdir2/subdir2.gyp:*',
],
},
],
}
| 21.352941 | 73 | 0.539945 |
a9907147f49d983acec8df7b01829a158208702a | 3,956 | py | Python | rolling/map/generator/filler/simple.py | coolkat64/rolling | 4c3ee2401128e993a52ac9b52cdbd32e17728129 | [
"MIT"
] | null | null | null | rolling/map/generator/filler/simple.py | coolkat64/rolling | 4c3ee2401128e993a52ac9b52cdbd32e17728129 | [
"MIT"
] | null | null | null | rolling/map/generator/filler/simple.py | coolkat64/rolling | 4c3ee2401128e993a52ac9b52cdbd32e17728129 | [
"MIT"
] | null | null | null | # coding: utf-8
import random
import typing
from rolling.map.generator.filler.base import FillerFactory
from rolling.map.generator.filler.base import TileMapFiller
from rolling.map.generator.generator import Border
from rolling.map.generator.generator import TileMapGenerator
from rolling.map.source import WorldMapSource
from rolling.map.type.world import Beach
from rolling.map.type.world import Hill
from rolling.map.type.world import Jungle
from rolling.map.type.world import Mountain
from rolling.map.type.world import Plain
from rolling.map.type.world import Sea
from rolling.map.type.world import WorldMapTileType
from rolling.map.type.zone import DryBush
from rolling.map.type.zone import RockyGround
from rolling.map.type.zone import Sand
from rolling.map.type.zone import SeaWater
from rolling.map.type.zone import ShortGrass
from rolling.map.type.zone import ZoneMapTileType
class SimpleTileMapFiller(TileMapFiller):
def __init__(
self,
row_i: int,
col_i: int,
world_map_source: WorldMapSource,
distribution: typing.List[typing.Tuple[float, typing.Type[ZoneMapTileType]]],
) -> None:
self._row_i = row_i
self._col_i = col_i
self._world_map_source = world_map_source
self._tiles: typing.List[typing.Type[ZoneMapTileType]] = [td[1] for td in distribution]
self._probabilities: typing.List[float] = [td[0] for td in distribution]
def get_char(
self,
tile_map_generator: TileMapGenerator,
is_border: bool,
distance_from_border: typing.Optional[int],
border: typing.Optional[Border] = None,
) -> str:
tile_type = random.choices(self._tiles, weights=self._probabilities)[0]
return tile_map_generator.kernel.tile_map_legend.get_str_with_type(tile_type)
class SeaTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(row_i, col_i, world_map_source, distribution=[(1.0, SeaWater)])
class MountainTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(row_i, col_i, world_map_source, distribution=[(1.0, RockyGround)])
class JungleTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(row_i, col_i, world_map_source, distribution=[(1.0, ShortGrass)])
class HillTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(row_i, col_i, world_map_source, distribution=[(1.0, ShortGrass)])
class BeachTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(
row_i, col_i, world_map_source, distribution=[(1.0, Sand), (0.05, DryBush)]
)
class PlainTileMapFiller(SimpleTileMapFiller):
def __init__(self, row_i: int, col_i: int, world_map_source: WorldMapSource) -> None:
super().__init__(row_i, col_i, world_map_source, distribution=[(1.0, ShortGrass)])
class SimpleFillerFactory(FillerFactory):
def __init__(self) -> None:
self._matches: typing.Dict[WorldMapTileType, typing.Type[SimpleTileMapFiller]] = {
Sea: SeaTileMapFiller,
Mountain: MountainTileMapFiller,
Jungle: JungleTileMapFiller,
Hill: HillTileMapFiller,
Beach: BeachTileMapFiller,
Plain: PlainTileMapFiller,
}
def create(
self,
world_map_tile_type: WorldMapTileType,
row_i: int,
col_i: int,
world_map_source: WorldMapSource,
) -> TileMapFiller:
return self._matches[world_map_tile_type](
row_i=row_i, col_i=col_i, world_map_source=world_map_source
)
| 38.407767 | 95 | 0.719666 |
86220df4501b3ed2cd7b33dedde4114e143b4e0e | 767 | py | Python | examples/mission_import.py | TSC21/MAVSDK-Python | 74178f008af161c180a2f964cbfcdc343c5a25f4 | [
"BSD-3-Clause"
] | null | null | null | examples/mission_import.py | TSC21/MAVSDK-Python | 74178f008af161c180a2f964cbfcdc343c5a25f4 | [
"BSD-3-Clause"
] | null | null | null | examples/mission_import.py | TSC21/MAVSDK-Python | 74178f008af161c180a2f964cbfcdc343c5a25f4 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
import asyncio
from mavsdk import System
import mavsdk.mission_raw
async def run():
drone = System()
await drone.connect(system_address="udp://:14540")
print("Waiting for drone to connect...")
async for state in drone.core.connection_state():
if state.is_connected:
print("Drone discovered!")
break
mission_import_data = await drone.mission_raw.import_qgroundcontrol_mission("example-mission.plan")
print(f"{len(mission_import_data.mission_items)} mission items imported")
await drone.mission_raw.upload_mission(mission_import_data.mission_items)
print("Mission uploaded")
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(run())
| 26.448276 | 103 | 0.718383 |
a6b2b7b88d7adc0671086cf894bdd08dab2dd3bb | 1,104 | py | Python | apis/voting/serializers.py | computablelabs/capi | 44e349fa3c71c8d2d390cdf2a5b7b8892807b40a | [
"MIT"
] | null | null | null | apis/voting/serializers.py | computablelabs/capi | 44e349fa3c71c8d2d390cdf2a5b7b8892807b40a | [
"MIT"
] | 43 | 2019-09-03T14:50:23.000Z | 2019-12-18T17:30:11.000Z | apis/voting/serializers.py | computablelabs/capi | 44e349fa3c71c8d2d390cdf2a5b7b8892807b40a | [
"MIT"
] | 1 | 2019-10-15T14:41:28.000Z | 2019-10-15T14:41:28.000Z | from flask_restplus import Model, fields
from apis.serializers import Listing
Candidate = Model('Candidate', {
'kind': fields.Integer(required=True, description='Type of this candidate'),
'owner': fields.String(required=True, description='Proposer of this candidate'),
'stake': fields.Integer(required=True, description='Amount of CMT Wei staked by the owner of this candidate'),
'vote_by': fields.Integer(required=True, description='When the poll for this candidate closes'), # leaving as int as not to convert
'yea': fields.Integer(required=True, description='Votes in support of this candidate'),
'nay': fields.Integer(required=True, description='Votes opposed to this candidate')
})
Candidates = Model('Candidates', {
'items': fields.List(fields.String(required=True, description='Hash identifier of the candidate')),
'from_block': fields.Integer(required=True, description='Block number where scanning began'),
'to_block': fields.Integer(required=True, description='Highest block number scanned')
})
Applicant = Listing.inherit('Candidate', Candidate)
| 55.2 | 135 | 0.744565 |
769009b6ada646a7e1ea87b6efd704ab12448c84 | 2,237 | py | Python | trax/tf_numpy/numpy/tests/backprop_test.py | yakovkeselman/trax | 615432bbc58ffb5bdf83a771e8f8b470995456db | [
"Apache-2.0"
] | 1 | 2020-05-30T15:19:39.000Z | 2020-05-30T15:19:39.000Z | trax/tf_numpy/numpy/tests/backprop_test.py | ZachT1711/trax | a0a3dd8d49e53fc48bb24cc08c10a8a53517e7bc | [
"Apache-2.0"
] | null | null | null | trax/tf_numpy/numpy/tests/backprop_test.py | ZachT1711/trax | a0a3dd8d49e53fc48bb24cc08c10a8a53517e7bc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for backpropgration on tf-numpy functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from trax.tf_numpy.numpy import array_ops
# Required for operator overloads
from trax.tf_numpy.numpy import math_ops # pylint: disable=unused-import
class BackpropTest(tf.test.TestCase):
def test_setitem(self):
# Single integer index.
a = array_ops.array([1., 2., 3.])
b = array_ops.array(5.)
c = array_ops.array(10.)
tensors = [arr.data for arr in [a, b, c]]
with tf.GradientTape() as g:
g.watch(tensors)
a[1] = b + c
loss = array_ops.sum(a)
gradients = g.gradient(loss.data, tensors)
self.assertSequenceEqual(
array_ops.array(gradients[0]).tolist(), [1., 0., 1.])
self.assertEqual(array_ops.array(gradients[1]).tolist(), 1.)
self.assertEqual(array_ops.array(gradients[2]).tolist(), 1.)
# Tuple index.
a = array_ops.array([[[1., 2.], [3., 4.]], [[5., 6.],
[7., 8.]]]) # 2x2x2 array.
b = array_ops.array([10., 11.])
tensors = [arr.data for arr in [a, b]]
with tf.GradientTape() as g:
g.watch(tensors)
a[(1, 0)] = b
loss = array_ops.sum(a)
gradients = g.gradient(loss.data, tensors)
self.assertSequenceEqual(
array_ops.array(gradients[0]).tolist(),
[[[1., 1.], [1., 1.]], [[0., 0.], [1., 1.]]])
self.assertEqual(array_ops.array(gradients[1]).tolist(), [1., 1.])
if __name__ == '__main__':
tf.compat.v1.enable_eager_execution()
tf.test.main()
| 31.957143 | 75 | 0.654895 |
31bd1ffd2ca6ae75efcd4a8b84229efdc4e80ac0 | 1,050 | py | Python | python2/koans/triangle.py | danielklintworth/python_koans | 2f8ea0dc5940f539c48899a4e75116186d55d9b6 | [
"MIT"
] | 1 | 2017-05-21T11:47:41.000Z | 2017-05-21T11:47:41.000Z | python2/koans/triangle.py | danielklintworth/python_koans | 2f8ea0dc5940f539c48899a4e75116186d55d9b6 | [
"MIT"
] | null | null | null | python2/koans/triangle.py | danielklintworth/python_koans | 2f8ea0dc5940f539c48899a4e75116186d55d9b6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Triangle Project Code.
# triangle(a, b, c) analyzes the lengths of the sides of a triangle
# (represented by a, b and c) and returns the type of triangle.
#
# It returns:
# 'equilateral' if all sides are equal
# 'isosceles' if exactly 2 sides are equal
# 'scalene' if no sides are equal
#
# The tests for this method can be found in
# about_triangle_project.py
# and
# about_triangle_project_2.py
#
def triangle(a, b, c):
# DELETE 'PASS' AND WRITE THIS CODE
if a >= b+c or b >= a+c or c >= a+b:
raise TriangleError
sides = {a, b, c}
if len(sides) == 1: # if all 3 are equal, 2 should be ignored in the set, leaving just 1
return 'equilateral'
elif len(sides) == 2: # if two are equal, there must be 2 distinct values left
return 'isosceles'
elif len(sides) == 3: # if no sides are equal, we will get three distinct values
return 'scalene'
# Error class used in part 2. No need to change this code.
class TriangleError(StandardError):
pass
| 27.631579 | 92 | 0.665714 |
07e3a905fc05759d82588608f192fc66a658be0a | 3,258 | py | Python | examples/ITEAD-OLED_shapes.py | gooligumelec/Gooligum-Adafruit_Python_SSD1306 | 94db7e22e0579b62d4c052f82ebcbf93245f6a14 | [
"MIT"
] | 1 | 2015-12-14T03:18:46.000Z | 2015-12-14T03:18:46.000Z | examples/ITEAD-OLED_shapes.py | gooligumelec/Gooligum-Adafruit_Python_SSD1306 | 94db7e22e0579b62d4c052f82ebcbf93245f6a14 | [
"MIT"
] | null | null | null | examples/ITEAD-OLED_shapes.py | gooligumelec/Gooligum-Adafruit_Python_SSD1306 | 94db7e22e0579b62d4c052f82ebcbf93245f6a14 | [
"MIT"
] | null | null | null | #!/usr/bin/python
#
# Example of displaying text and drawing shapes on an "ITEAD RPi OLED v2.0" display
#
# Based on Adafruit_Python_SSD1306/examples/shapes.py example
# v1.0 22/10/15
#
# David Meiklejohn
# Gooligum Electronics
#
# --------
# Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
import Adafruit_SSD1306
import Image
import ImageDraw
import ImageFont
# ITEAD RPi OLED v2.0 pin configuration:
RST = 4
# use the 128x64 display (with hardware I2C) driver:
disp = Adafruit_SSD1306.SSD1306_128_64(rst=RST)
# Initialize library.
disp.begin()
# Clear display.
disp.clear()
disp.display()
# Create blank image for drawing.
# Make sure to create image with mode '1' for 1-bit color.
width = disp.width
height = disp.height
image = Image.new('1', (width, height))
# Get drawing object to draw on image.
draw = ImageDraw.Draw(image)
# Draw a black filled box to clear the image.
draw.rectangle((0,0,width,height), outline=0, fill=0)
# Draw some shapes.
# First define some constants to allow easy resizing of shapes.
padding = 2
shape_width = 20
top = padding
bottom = height-padding
# Move left to right keeping track of the current x position for drawing shapes.
x = padding
# Draw an ellipse.
draw.ellipse((x, top , x+shape_width, bottom), outline=255, fill=0)
x += shape_width+padding
# Draw a rectangle.
draw.rectangle((x, top, x+shape_width, bottom), outline=255, fill=0)
x += shape_width+padding
# Draw a triangle.
draw.polygon([(x, bottom), (x+shape_width/2, top), (x+shape_width, bottom)], outline=255, fill=0)
x += shape_width+padding
# Draw an X.
draw.line((x, bottom, x+shape_width, top), fill=255)
draw.line((x, top, x+shape_width, bottom), fill=255)
x += shape_width+padding
# Load default font.
font = ImageFont.load_default()
# Alternatively load a TTF font.  Make sure the .ttf font file is in the same directory as the python script!
# Some other nice fonts to try: http://www.dafont.com/bitmap.php
#font = ImageFont.truetype('Minecraftia.ttf', 8)
# Write two lines of text.
draw.text((x, top),    'Hello',  font=font, fill=255)
draw.text((x, top+20), 'World!', font=font, fill=255)
# Display image: push the in-memory PIL buffer to the OLED.
disp.image(image)
disp.display()
| 31.631068 | 109 | 0.74248 |
ca2bf11d5c374dbf13d5c45e2752605904784d5b | 111 | py | Python | Chapter11/removing_nude_picture.py | HowToBeCalculated/Hands-On-Blockchain-for-Python-Developers | f9634259dd3dc509f36a5ccf3a5182c0d2ec79c4 | [
"MIT"
] | 62 | 2019-03-18T04:41:41.000Z | 2022-03-31T05:03:13.000Z | Chapter11/removing_nude_picture.py | HowToBeCalculated/Hands-On-Blockchain-for-Python-Developers | f9634259dd3dc509f36a5ccf3a5182c0d2ec79c4 | [
"MIT"
] | 2 | 2020-06-14T21:56:03.000Z | 2022-01-07T05:32:01.000Z | Chapter11/removing_nude_picture.py | HowToBeCalculated/Hands-On-Blockchain-for-Python-Developers | f9634259dd3dc509f36a5ccf3a5182c0d2ec79c4 | [
"MIT"
] | 42 | 2019-02-22T03:10:36.000Z | 2022-02-20T04:47:04.000Z | import ipfsapi
# Connect to the local IPFS daemon (ipfsapi default endpoint).
c = ipfsapi.connect()
# Unpin the picture's content hash so the repo garbage collector may reclaim it.
c.pin_rm('QmWgMcTdPY9Rv7SCBusK1gWBRJcBi2MxNkC1yC6uvLYPwK')
# Run garbage collection to actually remove the now-unpinned data.
c.repo_gc()
| 13.875 | 58 | 0.801802 |
04c700e4eb8bec06e762a43ec06925b6c3a689ce | 20,274 | py | Python | modules/AR_Scheduler.py | deepsphere/deepsphere-weather | a9c75de9c9852a2832883cd998efd16d6542b083 | [
"MIT"
] | 38 | 2021-03-15T15:40:25.000Z | 2022-03-10T06:36:50.000Z | modules/AR_Scheduler.py | deepsphere/deepsphere-weather | a9c75de9c9852a2832883cd998efd16d6542b083 | [
"MIT"
] | 4 | 2021-06-06T07:47:07.000Z | 2022-01-31T11:16:08.000Z | modules/AR_Scheduler.py | deepsphere/deepsphere-weather | a9c75de9c9852a2832883cd998efd16d6542b083 | [
"MIT"
] | 5 | 2021-08-07T21:41:46.000Z | 2021-12-29T13:25:46.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 23 11:31:21 2021
@author: ghiggi
"""
import numpy as np
from cycler import cycler
import matplotlib.pyplot as plt
##----------------------------------------------------------------------------.
### Check AR weights
def check_ar_weights(ar_weights):
    """Check AR weights validity and return them as a numpy array.

    Accepts a scalar (int/float), a list, or a numpy array.

    Raises:
        TypeError: if `ar_weights` is not a scalar, list, or numpy array.
        ValueError: if `ar_weights` is empty, contains a negative weight,
            or its last weight is 0 (the last AR weight must stay active).
    """
    if isinstance(ar_weights, (int, float)):
        ar_weights = [ar_weights]
    if isinstance(ar_weights, list):
        ar_weights = np.array(ar_weights)
    if not isinstance(ar_weights, np.ndarray):
        raise TypeError("Specify AR weights with a list or a numpy array.")
    # Fail early on empty input: otherwise ar_weights[-1] below would crash
    # with an obscure IndexError.
    if ar_weights.size == 0:
        raise ValueError("AR weights must contain at least one weight.")
    # Check that any initial AR weight is negative
    if any(ar_weights < 0):
        raise ValueError("AR weights must not contain negative weights.")
    # Check that the last AR weight is not zero !
    if ar_weights[-1] == 0:
        raise ValueError("The last weight of ar_weights must not be 0.")
    return ar_weights
#----------------------------------------------------------------------------.
# No AR weights update when .step()
def _ConstantStep(self):
    # 'Constant' method: .step() never changes the weights; just return the
    # current normalized AR weights unchanged.
    return self.ar_weights
def _DiracDeltaStep(self):
    # 'DiracDelta' method: .step() is a no-op (the zeroing of older weights
    # happens in AR_Scheduler.update(), not here).
    return self.ar_weights
##----------------------------------------------------------------------------.
## Discrete weight update functions
def _StepwiseDecayStep(self):
weights = self.ar_absolute_weights[:-1]
weights = weights - self.factor
weights[weights < 0] = 0
self.ar_absolute_weights[:-1] = weights
def _StepwiseGrowthStep(self):
weight = self.ar_absolute_weights[-1]
weight = weight + self.factor
if weight > 1:
weight = 1
self.ar_absolute_weights[-1] = weight
def _StepwiseStep(self):
    # Act only once 'step_interval' .step() calls have accumulated.
    if self.temporary_step_count < self.step_interval:
        return
    _StepwiseDecayStep(self)
    if self.smooth_growth:
        _StepwiseGrowthStep(self)
    # Restart the interval counter.
    self.temporary_step_count = 0
def _HalfDecayStep(self):
weights = self.ar_absolute_weights[:-1]
weights = weights/2
self.ar_absolute_weights[:-1] = weights
def _HalfGrowthStep(self):
weight = self.ar_absolute_weights[-1]
if weight == 0:
weight = self.factor
weight = weight*2
if weight > 1:
weight = 1
self.ar_absolute_weights[-1] = weight
def _HalfStep(self):
    # Act only once 'step_interval' .step() calls have accumulated.
    if self.temporary_step_count < self.step_interval:
        return
    _HalfDecayStep(self)
    if self.smooth_growth:
        _HalfGrowthStep(self)
    # Restart the interval counter.
    self.temporary_step_count = 0
##----------------------------------------------------------------------------.
### Continous weight update functions
def _LinearDecayStep(self):
initial_weights = self.ar_absolute_initial_weights[:-1]
weights = initial_weights - self.factor*self.global_step_count_arr[:-1]
weights[weights < 0] = 0
self.ar_absolute_weights[:-1] = weights
def _LinearGrowthStep(self):
initial_weight = self.ar_absolute_initial_weights[-1]
weight = initial_weight + self.factor*self.global_step_count_arr[-1]
if weight > 1:
weight = 1
self.ar_absolute_weights[-1] = weight
def _LinearStep(self):
    # Decay the older weights linearly; optionally grow the newest one.
    _LinearDecayStep(self)
    if self.smooth_growth:
        _LinearGrowthStep(self)
def _ExponentialDecayStep(self):
initial_weights = self.ar_absolute_initial_weights[:-1]
weights = initial_weights * np.exp(-self.factor*self.global_step_count_arr[:-1])
self.ar_absolute_weights[:-1] = weights
def _ExponentialGrowthStep(self):
weight = self.factor * np.exp(self.factor*self.global_step_count_arr[-1])
if weight > 1:
weight = 1
self.ar_absolute_weights[-1] = weight
def _ExponentialStep(self):
    # Decay the older weights exponentially; optionally grow the newest one.
    _ExponentialDecayStep(self)
    if self.smooth_growth:
        _ExponentialGrowthStep(self)
#-----------------------------------------------------------------------------.
class AR_Scheduler():
"""Autoregressive (AR) weights scheduler."""
def __init__(self,
method = "LinearStep",
factor = 0.001,
step_interval = None,
smooth_growth = True,
fixed_ar_weights = None,
initial_ar_absolute_weights = None,
initial_ar_weights = None):
"""Autoregressive (AR) weights scheduler.
Parameters
----------
smooth_growth : bool, optional
Wheter to set the new AR weight to 0 and growth it smoothly to avoid
training destabilization.
Do not apply to 'Constant' and 'DiracDelta' methods.
The default is True.
method : str, optional
Available methods: 'Constant','DiracDelta','StepwiseDecay','HalfDecay','LinearDecay','ExponentialDecay'
The default method is "DiracDelta".
Methods explanation:
Constant: Add an AR weight (with absolute value 1) when .update() is called.
DiracDelta: Add an AR weight when .update() is called and
reset to 0 the others AR weights.
StepwiseStep: When a new AR weight is added with .update(), it start to substract
'factor' from the others AR absolute weights every 'step_interval' .step() calls.
If smooth_growth=True, the new AR weight growth by step from 0 every 'step_interval' .step() calls.)
HalfStep: When a new AR weight is added with .update(), it start to half
the others AR absolute weights every 'step_interval' .step() calls.
If smooth_growth=True, the new AR weight growth by doubling from factor every 'step_interval' .step() calls.
LinearStep : When a new AR weight is added with .update(), it start to
decrease linearly (with slope '-factor') the others
AR absolute weights every .step() call.
If smooth_growth=True, the new AR weight growth linearly
starting from 0.
ExponentialStep: When a new AR weight is added with .update(), it start to
decrease exponentially (with decay rate '-factor')
the others AR absolute weights every .step() call.
If smooth_growth=True, the new AR weight growth exponentially
starting from 'factor'.
factor : float, optional
Argument required by the following methods: 'StepwiseStep','HalfStep','LinearStep','ExponentialStep'.
Regulate the decay and growth of AR absolute weights when .step() is called.
For HalfStep and ExponentialStep, is also used as first value for the new ar_weight when smooth_growth=True.
step_interval : int, optional
Argument required by the following methods: 'StepwiseStep','HalfStep'.
Specify the frequency with which the AR weights are updated with methods 'StepwiseStep' and 'HalfStep'.
Step_interval = 1 cause weight update at every .step() call.
fixed_ar_weights : list, optional
List of AR iterations for which the value AR weights must not be
modified by the step functions.
The default is None. No AR weights is fixed.
initial_ar_abolute_weights : list, optional
Specify the initial absolute AR weights.
They will be rescaled to have 1 has largest value.
If specified, initial_ar_weights must not be specified.
The default is ar_weights = [1].
initial_ar_weights : list, optional
Specify the initial normalized AR weights. (must sum up to 1).
If specified, initial_ar_abolute_weights must not be specified.
The default is ar_weights = [1].
"""
# 'StepwiseDecay' and 'HalfDecay' factor is applied to the ar_absolute weights (not the normalized ar_weights)
# 'LinearDecay','ExponentialDecay' is applied from the initial ar_absolute_weights
# TODO:
# - Implement a min_ar_weight_option? (instead of decaying to 0)
# - Increasing-Decreasing Decay ... "
# Check smooth_growth
##--------------------------------------------------------------------.
if not isinstance(smooth_growth, bool):
raise TypeError("'smooth_growth' must be either True or False.")
##--------------------------------------------------------------------.
# Check valid method
valid_method = ['Constant','DiracDelta','StepwiseStep','HalfStep','LinearStep','ExponentialStep']
if method not in valid_method:
raise ValueError("Provide a valid 'method'.")
##--------------------------------------------------------------------.
# Check fixed_ar_weights
if not isinstance(fixed_ar_weights, (type(None), np.ndarray, list)):
raise TypeError("'fixed_ar_weights' must be specified as list.")
if isinstance(fixed_ar_weights, list):
fixed_ar_weights = np.array(fixed_ar_weights)
if fixed_ar_weights is not None:
if len(fixed_ar_weights) == 0:
fixed_ar_weights = None
##---------------------------------------------------------------------.
# Check initial_ar_weights and initial_ar_absolute_weights are not both specified.
if initial_ar_weights is not None and initial_ar_absolute_weights is not None:
raise ValueError("Specify either 'initial_ar_weights' or 'initial_ar_absolute_weights'.")
# Set default ar_weights if not specified
if initial_ar_weights is None and initial_ar_absolute_weights is None:
initial_ar_weights = [1]
# Check initial_ar_weights
if initial_ar_weights is not None:
# Check AR weights validity
initial_ar_weights = check_ar_weights(initial_ar_weights)
# Check ar_weights sum up to 1
if np.sum(initial_ar_weights) != 1:
raise ValueError("'initial_ar_weights' must sum up to 1.")
# Compute AR absolute weights
# - Force the largest values to be 1
initial_ar_absolute_weights = initial_ar_weights/initial_ar_weights.max()
# Check initial_ar_absolute_weights
elif initial_ar_absolute_weights is not None:
# Check AR weights validity
initial_ar_absolute_weights = check_ar_weights(initial_ar_absolute_weights)
# - Force the maximum values to be 1
initial_ar_absolute_weights = initial_ar_absolute_weights/initial_ar_absolute_weights.max()
# Compute the normalized AR weights
initial_ar_weights = initial_ar_absolute_weights/initial_ar_absolute_weights.sum()
else:
raise NotImplementedError("This option has been not considered.")
##--------------------------------------------------------------------.
# Check that factor and step_interval are not negative
if factor is not None:
if factor < 0:
raise ValueError("Provide a factor between 0 and 1.")
if step_interval is not None:
if step_interval <= 0:
raise ValueError("'step_interval' must be an integer value equal or larger than 1.")
##---------------------------------------------------------------------.
# Check required method arguments are specified
if method in ['StepwiseStep','HalfStep']:
if step_interval is None:
raise ValueError("'{}' method requires specification of the 'step_interval' argument".format(method))
if method in ['HalfStep','StepwiseStep','LinearStep','ExponentialStep']:
if factor is None:
raise ValueError("'{}' method requires specification of the 'factor' argument".format(method))
if method in ['Constant', 'DiracDelta']:
smooth_growth = False
##---------------------------------------------------------------------.
# Count the number of AR iteration (at start)
current_ar_iterations = len(initial_ar_weights) - 1
self.current_ar_iterations = current_ar_iterations
# Set absolute AR weights
self.ar_absolute_weights = initial_ar_absolute_weights
# Set ar_weights (normalized AR weights)
self.ar_weights = initial_ar_weights
# Set initial AR absolute weights (for fixed weights) and 'LinearDecay' and 'ExponentialDecay'
self.ar_absolute_initial_weights = self.ar_absolute_weights.copy()
##--------------------------------------------------------------------.
# Add method arguments
self.method = method
self.step_interval = step_interval
self.factor = factor
self.smooth_growth = smooth_growth
self.fixed_ar_weights = fixed_ar_weights
##--------------------------------------------------------------------.
# Initialize temporary step counter
# - For 'StepwiseDecay' and 'HalfDecay' method --> step_interval
self.temporary_step_count = 0
##--------------------------------------------------------------------.
# - Initialize global step counter
# - For 'LinearDecay' and 'ExponentialDecay'
self.global_step_count_arr = np.zeros(current_ar_iterations+1)
##--------------------------------------------------------------------.
### Define the update_weights function
fun_dict = {'Constant': _ConstantStep,
'DiracDelta': _DiracDeltaStep,
'StepwiseStep': _StepwiseStep,
'HalfStep': _HalfStep,
'LinearStep': _LinearStep,
'ExponentialStep': _ExponentialStep,
}
self.update_weights = fun_dict[method]
##--------------------------------------------------------------------.
    def step(self):
        """Advance the scheduler by one training iteration.

        Increments the step counters, lets the configured ``update_weights``
        function adapt the absolute AR weights, restores any weights pinned
        via ``fixed_ar_weights``, and refreshes the normalized AR weights.
        """
        # Update step count
        self.temporary_step_count = self.temporary_step_count + 1 # for 'StepwiseDecay' and 'HalfDecay'
        self.global_step_count_arr = self.global_step_count_arr + 1 # for 'LinearDecay' and 'ExponentialDecay'
        ##---------------------------------------------------------------------.
        # Only adapt weights once at least one extra AR iteration is active.
        if self.current_ar_iterations > 0:
            # - Update weights (update_weights is a plain function taken from
            #   fun_dict at construction, hence the explicit 'self' argument)
            self.update_weights(self)
            # - Refix the value of fixed AR weights
            if self.fixed_ar_weights is not None:
                # Keep only indices that refer to currently existing AR iterations.
                tmp_fixed_ar_weights = self.fixed_ar_weights[self.fixed_ar_weights < self.current_ar_iterations]
                self.ar_absolute_weights[tmp_fixed_ar_weights] = self.ar_absolute_initial_weights[tmp_fixed_ar_weights]
        ##---------------------------------------------------------------------.
        # Retrieve normalized AR weights (summing up to 1)
        self.ar_weights = np.array(self.ar_absolute_weights)/np.sum(self.ar_absolute_weights)
    def update(self):
        """Enable one additional AR iteration by appending a new AR weight.

        The new absolute weight starts at 1 (or at 0 when ``smooth_growth``
        is enabled, so it can grow gradually); normalized weights and the
        per-weight step counters are then refreshed.
        """
        # Update the number of AR iterations
        self.current_ar_iterations = self.current_ar_iterations + 1
        # Add a new AR weight
        if not self.smooth_growth: # ... with (absolute) value 1
            self.ar_absolute_weights = np.append(self.ar_absolute_weights, 1)
            self.ar_absolute_initial_weights = np.append(self.ar_absolute_initial_weights, 1)
        else: # start at 0 (or factor for ExponentialStep, HalfStep)
            # Update current last weight value (for ExponentialStep and LinearStep)
            self.ar_absolute_initial_weights[-1] = self.ar_absolute_weights[-1]
            # Add new weight
            self.ar_absolute_initial_weights = np.append(self.ar_absolute_initial_weights, 0)
            self.ar_absolute_weights = np.append(self.ar_absolute_weights, 0)
        ##---------------------------------------------------------------------.
        # If the DiracDelta weight update method is chosen, set the other weights to 0
        if self.method == "DiracDelta":
            self.ar_absolute_weights[:-1] = 0
        ##---------------------------------------------------------------------.
        # Update normalization of AR weights
        self.ar_weights = np.array(self.ar_absolute_weights)/np.sum(self.ar_absolute_weights)
        ##---------------------------------------------------------------------.
        # Update the step count array (--> For LinearDecay and ExponentialDecay)
        self.global_step_count_arr[-1] = 0 # Reset the last (because will start to decay)
        self.global_step_count_arr = np.append(self.global_step_count_arr, 0)
#----------------------------------------------------------------------------.
def plot_AR_scheduler(ar_scheduler,
                      n_updates=4,
                      update_every=15,
                      plot_absolute_ar_weights=True,
                      plot_normalized_ar_weights=True):
    """Simulate an AR scheduler and plot the evolution of its weights.

    Runs ``update_every`` step() calls between each of ``n_updates`` update()
    calls, recording the absolute and normalized weight of every AR iteration
    at every simulated iteration, then plots them with matplotlib.

    NOTE(review): this mutates the passed-in *ar_scheduler* (via its step()
    and update() methods) - pass a throwaway instance.
    """
    n_initial_ar_weights = len(ar_scheduler.ar_weights)
    n_final_ar_weights = n_initial_ar_weights + n_updates
    ### Initialize dictionary
    # One record per AR iteration: parallel lists of iteration index,
    # absolute weight and normalized weight.
    ar_weights_per_ar_iteration = {}
    for i in range(n_final_ar_weights + 1):
        ar_weights_per_ar_iteration[i] = {}
        ar_weights_per_ar_iteration[i]['iteration'] = []
        ar_weights_per_ar_iteration[i]['ar_absolute_weights'] = []
        ar_weights_per_ar_iteration[i]['ar_weights'] = []
    # Simulate AR weights step() and update()
    iteration = 0
    for u in range(n_updates+1):
        for i in range(update_every+1):
            current_ar_iterations = len(ar_scheduler.ar_weights) - 1
            for ar_iteration in range(current_ar_iterations+1):
                ar_weights_per_ar_iteration[ar_iteration]['iteration'].append(iteration)
                ar_weights_per_ar_iteration[ar_iteration]['ar_absolute_weights'].append(ar_scheduler.ar_absolute_weights[ar_iteration])
                ar_weights_per_ar_iteration[ar_iteration]['ar_weights'].append(ar_scheduler.ar_weights[ar_iteration])
            ar_scheduler.step()
            iteration = iteration + 1
        ar_scheduler.update()
    ##------------------------------------------------------------------------.
    ### Visualize AR weights
    method = ar_scheduler.method
    # Cycle both linestyle and color so more than ten curves stay readable.
    prop_cycle = plt.rcParams['axes.prop_cycle']
    colors = prop_cycle.by_key()['color']
    custom_cycler = cycler(linestyle=['-', '--', ':', '-.','-', '--', ':', '-.','-', '--'],
                           color=colors)
    if plot_absolute_ar_weights:
        fig, ax = plt.subplots()
        ax.set_prop_cycle(custom_cycler)
        for ar_iteration in range(n_final_ar_weights+1):
            plt.plot(ar_weights_per_ar_iteration[ar_iteration]['iteration'],
                     ar_weights_per_ar_iteration[ar_iteration]['ar_absolute_weights'],
                     antialiased = True)
        ax.set_xlabel("Iteration")
        plt.title("Absolute AR weights ({})".format(method))
        ax.legend(labels=list(range(n_final_ar_weights+1)), loc='upper right')
        plt.show()
    if plot_normalized_ar_weights:
        fig, ax = plt.subplots()
        ax.set_prop_cycle(custom_cycler)
        for ar_iteration in range(n_final_ar_weights+1):
            plt.plot(ar_weights_per_ar_iteration[ar_iteration]['iteration'],
                     ar_weights_per_ar_iteration[ar_iteration]['ar_weights'],
                     antialiased = True)
        ax.set_xlabel("Iteration")
        plt.title("Normalized AR weights ({})".format(method))
        ax.legend(labels=list(range(n_final_ar_weights+1)), loc='upper right')
        plt.show()
##----------------------------------------------------------------------------.
| 49.328467 | 135 | 0.571619 |
f5fa3e697640c9794a979fe433949f054179e106 | 1,045 | py | Python | typecube/utils.py | panyam/typecube | e8fa235675b6497acd52c68286bb9e4aefc5c8d1 | [
"Apache-2.0"
] | null | null | null | typecube/utils.py | panyam/typecube | e8fa235675b6497acd52c68286bb9e4aefc5c8d1 | [
"Apache-2.0"
] | null | null | null | typecube/utils.py | panyam/typecube | e8fa235675b6497acd52c68286bb9e4aefc5c8d1 | [
"Apache-2.0"
] | null | null | null |
class FQN(object):
    """Decompose a possibly dotted name into (name, namespace, fqn) parts.

    A dotted *name* (e.g. ``"a.b.c"``) is split into its last component and
    the leading namespace; otherwise *name* and *namespace* are used as
    given. The fully qualified name is ``namespace + "." + name`` when both
    are non-empty, just ``name`` when only the name is set, and ``None``
    when the name is empty.
    """

    def __init__(self, name, namespace, ensure_namespaces_are_equal = True):
        name = (name or "").strip()
        namespace = (namespace or "").strip()
        components = name.split(".")
        if len(components) > 1:
            last_part = components[-1]
            derived_ns = ".".join(components[:-1])
            if ensure_namespaces_are_equal and namespace and derived_ns != namespace:
                # The namespace derived from the dotted name must agree with
                # the explicitly supplied one (when both are given).
                assert derived_ns == namespace or not namespace, "Namespaces dont match '%s' vs '%s'" % (derived_ns, namespace)
            name, namespace = last_part, derived_ns
        if name:
            fqn = namespace + "." + name if namespace else name
        else:
            fqn = None
        self._name = name
        self._namespace = namespace
        self._fqn = fqn

    @property
    def parts(self):
        """The (name, namespace, fqn) triple."""
        return (self._name, self._namespace, self._fqn)

    @property
    def name(self):
        """The unqualified (last) name component."""
        return self._name

    @property
    def namespace(self):
        """The namespace portion (may be empty)."""
        return self._namespace

    @property
    def fqn(self):
        """The fully qualified name, or None when no name was given."""
        return self._fqn
c193186c242879b87354b49d293eb993d245ba4e | 379 | py | Python | exec/pynex/run_test.py | ghewgill/neon-lang | e9bd686a6c566dc6e40f2816cab34c24725847c7 | [
"MIT"
] | 74 | 2016-01-18T12:20:53.000Z | 2022-01-16T10:26:29.000Z | exec/pynex/run_test.py | ghewgill/neon-lang | e9bd686a6c566dc6e40f2816cab34c24725847c7 | [
"MIT"
] | 197 | 2015-01-02T03:50:59.000Z | 2022-01-24T05:40:39.000Z | exec/pynex/run_test.py | ghewgill/neon-lang | e9bd686a6c566dc6e40f2816cab34c24725847c7 | [
"MIT"
] | 2 | 2015-04-01T03:54:19.000Z | 2021-11-29T08:27:12.000Z | #!/usr/bin/env python3
# Run a single test through the pynex interpreter.
import os
import subprocess
import sys
# Default interpreter location; overridable via a leading "--pynex <path>".
pynex = "exec/pynex/pynex.py"
i = 1
# Consume an optional "--pynex <path>" pair; stop at the first other argument.
while i < len(sys.argv):
    if sys.argv[i] == "--pynex":
        i += 1
        pynex = sys.argv[i]
    else:
        break
    i += 1
# The next argument is the test's source file.
fullname = sys.argv[i]
path, name = os.path.split(fullname)
# Run pynex on the compiled file (source name + "x"), forwarding any
# remaining command-line arguments; raises CalledProcessError on failure.
subprocess.check_call([sys.executable, pynex, fullname + "x"] + sys.argv[i+1:])
c614c8715084d181ff86866074e809a7af29c11e | 4,089 | py | Python | plugin.video.yle/default.py | miok/nettiteevee-xbmc | 3cdd0a7a3e139eeb3cf6f7e1f604ffc2f639d243 | [
"Unlicense"
] | 1 | 2018-07-17T11:00:25.000Z | 2018-07-17T11:00:25.000Z | plugin.video.yle/default.py | miok/nettiteevee-xbmc | 3cdd0a7a3e139eeb3cf6f7e1f604ffc2f639d243 | [
"Unlicense"
] | null | null | null | plugin.video.yle/default.py | miok/nettiteevee-xbmc | 3cdd0a7a3e139eeb3cf6f7e1f604ffc2f639d243 | [
"Unlicense"
] | null | null | null | # vim: set fileencoding=utf8
import urllib, urllib2, sys, os
import subprocess
import feedparser
import xbmcplugin,xbmcgui,xbmc
import traceback
# plugin handle
handle = int(sys.argv[1])
teemat={
"TV-uutiset": "http://areena.yle.fi/.rss?q=uutiset&media=video",
"Uusimmat ohjelmat": "http://areena.yle.fi/tv/kaikki.rss?jarjestys=uusin&media=video",
"Ajankohtaisohjelmat": "http://areena.yle.fi/tv/dokumentit-ja-fakta/ajankohtaisohjelmat.rss",
"Asiaviihde": "http://areena.yle.fi/tv/dokumentit-ja-fakta/asiaviihde.rss",
"Luonto": "http://areena.yle.fi/tv/dokumentit-ja-fakta/luonto.rss",
"DJOrion": "http://areena.yle.fi/api/search.rss?id=1474943&media=audio&sisalto=ohjelmat",
"Parasta ennen": "http://areena.yle.fi/api/search.rss?id=1653889&media=audio&sisalto=ohjelmat"
}
def _start_media_thumbnail(self, attrsD):
context = self._getContext()
context.setdefault('media_thumbnail', [])
context['media_thumbnail'].append(attrsD)
def getKeyboard(default = "", heading = "Hakusana", hidden = False):
    """Show the XBMC on-screen keyboard and return the URL-quoted entry.

    Returns *default* unchanged when the user cancels the dialog.
    """
    keyboard = xbmc.Keyboard(default, heading, hidden)
    keyboard.doModal()
    if not keyboard.isConfirmed():
        return default
    return urllib.quote_plus(keyboard.getText())
class YleRssTitleAndUrlParser:
    """Parses a YLE Areena RSS feed into (title, link, thumbnail_url) tuples."""
    def __init__(self):
        # Accumulated (title, link, thumbnail url) tuples; parse() extends this,
        # so repeated parse() calls on one instance append, not replace.
        self.items=[]
    def parse(self, url):
        # Install the media:thumbnail handler on feedparser so thumbnail
        # attributes get collected (see _start_media_thumbnail above).
        feedparser._FeedParserMixin._start_media_thumbnail = _start_media_thumbnail
        self.f = feedparser.parse(urllib2.urlopen(url))
        i = 0
        for e in self.f['entries']:
            # Debug print of the first entry only (i starts at 0, step 2).
            if i < 2: print e
            i += 2
            print e.get('media_thumbnail')[0]['url']
            # The second thumbnail is used as the list-item icon;
            # entries with fewer than two thumbnails would raise IndexError.
            self.items.append((e.get('title'), e.get('link'), e.get('media_thumbnail')[1]['url']))
        return self.items
p=YleRssTitleAndUrlParser()
print sys.argv
# Query parameters parsed from the plugin invocation URL (sys.argv[2]).
request = {}
if not sys.argv[2]:
    # No parameters: show the top-level menu of themes plus a search entry.
    for t in sorted(teemat):
        li = xbmcgui.ListItem(t, iconImage="DefaultFolder.png")
        url = sys.argv[0] + "?teema=" + urllib.quote_plus(t)
        xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=li, isFolder=True)
    it = xbmcgui.ListItem("Haku", iconImage="DefaultSearch.png")
    url = sys.argv[0] +"?search=hae"
    xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=it, isFolder=True)
    xbmcplugin.endOfDirectory(handle=handle, succeeded=True)
else:
    # Parse "?key=val&key=val" from the invocation URL into the request dict.
    for r in str(sys.argv[2]).split('?')[-1].split('&'):
        key, val = r.split('=')
        request[key] = urllib.unquote_plus(val)
if 'teema' in request:
    # Theme selected: list the programmes of that theme's RSS feed.
    teema = request['teema']
    items = p.parse(teemat[teema])
    for item in items:
        url = sys.argv[0] + "?item_url=" + item[1]
        li = xbmcgui.ListItem(item[0], iconImage=item[2])
        li.setInfo('video', {'Title': item[0]})
        li.setProperty("PageURL", item[1]);
        xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=li, isFolder=False)
    xbmcplugin.endOfDirectory(handle=handle, succeeded=True)
if 'search' in request:
    # Free-text search: prompt with the on-screen keyboard and list results.
    hakusana = getKeyboard()
    items = p.parse("http://areena.yle.fi/.rss?&q=" + hakusana)
    print items
    for item in items:
        url = sys.argv[0] + "?item_url=" + item[1]
        li = xbmcgui.ListItem(item[0], iconImage=item[2])
        li.setInfo('video', {'Title': item[0]})
        li.setProperty("PageURL", item[1]);
        xbmcplugin.addDirectoryItem(handle=handle, url=url, listitem=li, isFolder=False)
    xbmcplugin.endOfDirectory(handle=int(sys.argv[1]), succeeded=True)
if 'item_url' in request:
    # Programme selected: resolve the stream with the external yle-dl tool
    # and hand the RTMP parameters to the XBMC player.
    print request
    item_url = request['item_url']
    # Remove any stale subtitle file from a previous playback (best effort).
    try:
        os.remove("/tmp/areenasub.fin.srt")
    except:
        pass
    print item_url, "jeejejejeej"
    pp = subprocess.check_output(['yle-dl', '--showurl', item_url])
    output = pp.rstrip()
    # Empty output: retry the radio variant of the same Areena URL.
    if len(output) < 2:
        item_url = item_url.replace("tv", "radio")
        pp = subprocess.check_output(['yle-dl', '--showurl', item_url])
        output = pp.rstrip()
    item = xbmcgui.ListItem("YLE");
    # yle-dl prints space-separated "key=value" RTMP parameters; the first
    # token is the stream URL itself, so it is skipped here.
    params = output.split(" ")
    skip = 0
    for param in params:
        skip+=1
        if skip != 1:
            print param.split("=",1)[0],param.split("=",1)[1]
            item.setProperty(param.split("=",1)[0], param.split("=",1)[1].replace(" ", ""))
    xbmc.Player(xbmc.PLAYER_CORE_AUTO).play(item.getProperty("tcUrl")+"/"+item.getProperty("playpath") + output)
    xbmc.Player().setSubtitles('/tmp/areenasub.fin.srt')
9055c9f0e5fb38a82dfb76d45a09335545a0e464 | 1,348 | py | Python | tronx/pyrogramx/methods/decorators/on_inline.py | TronUb/Tron | 55b5067a34cf2849913647533d7d035cab64568e | [
"MIT"
] | 4 | 2022-03-07T07:27:04.000Z | 2022-03-29T05:59:57.000Z | tronx/pyrogramx/methods/decorators/on_inline.py | TronUb/Tron | 55b5067a34cf2849913647533d7d035cab64568e | [
"MIT"
] | null | null | null | tronx/pyrogramx/methods/decorators/on_inline.py | TronUb/Tron | 55b5067a34cf2849913647533d7d035cab64568e | [
"MIT"
] | 3 | 2022-03-05T15:24:51.000Z | 2022-03-14T08:48:05.000Z | from typing import Callable
import pyrogram
from pyrogram.filters import Filter
class OnInlineQuery:
def on_inline_query(
self=None,
filters=None,
group: int = 0
) -> Callable:
"""Decorator for handling inline queries.
This does the same thing as :meth:`~pyrogram.Client.add_handler` using the
:obj:`~pyrogram.handlers.InlineQueryHandler`.
Parameters:
filters (:obj:`~pyrogram.filters`, *optional*):
Pass one or more filters to allow only a subset of inline queries to be passed
in your function.
group (``int``, *optional*):
The group identifier, defaults to 0.
"""
def decorator(func: Callable) -> Callable:
if isinstance(self, pyrogram.Client):
self.add_handler(pyrogram.handlers.InlineQueryHandler(func, filters), group)
elif isinstance(self, Filter) or self is None:
if not hasattr(func, "handlers"):
func.handlers = []
func.handlers.append(
(
pyrogram.handlers.InlineQueryHandler(func, self),
group if filters is None else filters
)
)
return func
return decorator
| 30.636364 | 94 | 0.55638 |
79401ac2c02e754404a2814c0e7c0a163c086971 | 427 | py | Python | tasks.py | dmitrypol/redis101 | 49e5ddccff6b799eefc5fcaaf7e1a5335ba570a4 | [
"MIT"
] | 1 | 2019-04-17T07:46:50.000Z | 2019-04-17T07:46:50.000Z | tasks.py | dmitrypol/redis101 | 49e5ddccff6b799eefc5fcaaf7e1a5335ba570a4 | [
"MIT"
] | null | null | null | tasks.py | dmitrypol/redis101 | 49e5ddccff6b799eefc5fcaaf7e1a5335ba570a4 | [
"MIT"
] | null | null | null | import os
import time
from redis import Redis
redis_conn = Redis(host=os.environ['REDIS_HOST'], port=os.environ['REDIS_PORT'])
from rq.decorators import job
@job('default', connection=redis_conn)
def generate_report(input_param):
    """RQ background task: simulate slow report generation (5 s sleep)
    and echo the input back under the 'input_param' key."""
    time.sleep(5)
    return {'input_param': input_param}
@job('default', connection=redis_conn)
def download_data(input_param):
    """RQ background task: simulate a slow download (5 s sleep) and echo
    the input back under the 'download_data' key."""
    time.sleep(5)
    return {'download_data': input_param}
f860857a78dcfc3fa6c3b992c484a9d3d7fa0de0 | 438 | py | Python | keras/__init__.py | Jallet/keras-jl-ac-mean | 2bbc1596192fb8c3aefc4a8126482a5283574a59 | [
"MIT"
] | 1 | 2016-12-15T07:20:42.000Z | 2016-12-15T07:20:42.000Z | keras/__init__.py | Jallet/keras-jl-ac-mean | 2bbc1596192fb8c3aefc4a8126482a5283574a59 | [
"MIT"
] | null | null | null | keras/__init__.py | Jallet/keras-jl-ac-mean | 2bbc1596192fb8c3aefc4a8126482a5283574a59 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from . import backend
from . import datasets
from . import engine
from . import layers
from . import preprocessing
from . import utils
from . import wrappers
from . import callbacks
from . import constraints
from . import initializations
from . import metrics
from . import models
from . import objectives
from . import optimizers
from . import regularizers
__version__ = '1.0.3'
| 23.052632 | 39 | 0.757991 |
ee0d828a7270c417a66780f9619e15228a2911c9 | 2,695 | py | Python | imagepy/menus/Image/Adjust/threshold_plg.py | muxevola/imagepy | d6d8cbf214f47a4a545a0d283ae393a6932c4c0f | [
"BSD-4-Clause"
] | null | null | null | imagepy/menus/Image/Adjust/threshold_plg.py | muxevola/imagepy | d6d8cbf214f47a4a545a0d283ae393a6932c4c0f | [
"BSD-4-Clause"
] | null | null | null | imagepy/menus/Image/Adjust/threshold_plg.py | muxevola/imagepy | d6d8cbf214f47a4a545a0d283ae393a6932c4c0f | [
"BSD-4-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 18 22:56:50 2016
@author: yxl
"""
from imagepy import IPy
import numpy as np
from imagepy.core.engine import Filter
from imagepy.ui.panelconfig import ParaDialog
from imagepy.ui.widgets import HistCanvas
from ....core.manager import WindowsManager
class ThresholdDialog(ParaDialog):
    """Parameter dialog that keeps the low/high threshold sliders ordered
    and mirrors their positions on the embedded histogram widget."""
    def __init__(self, parent, title, lim):
        ParaDialog.__init__(self, parent, title)
        # (min, max) of the image value range, used to normalize slider values.
        self.lim = lim
    def para_check(self, para, key):
        # Keep thr1 <= thr2: moving one slider past the other drags it along.
        if key=='thr1':para['thr2'] = max(para['thr1'], para['thr2'])
        if key=='thr2':para['thr1'] = min(para['thr1'], para['thr2'])
        # Normalize both thresholds to [0, 1] over the image range...
        lim1 = 1.0 * (para['thr1'] - self.lim[0])/(self.lim[1]-self.lim[0])
        lim2 = 1.0 * (para['thr2'] - self.lim[0])/(self.lim[1]-self.lim[0])
        # ...then scale to [0, 255] for the histogram overlay markers.
        self.ctrl_dic['hist'].set_lim(lim1*255, lim2*255)
        self.reset()
        return True
class Plugin(Filter):
    """Interactive threshold filter with live green/red preview overlay."""
    modal = False
    title = 'Threshold'
    note = ['all', 'auto_msk', 'auto_snap', 'not_channel', 'preview']
    # Value range used to map thresholds onto the 256-entry LUT;
    # overwritten with the image's own range for non-8-bit images.
    arange = (0,255)
    def load(self, ips):
        # Build the parameter view: a histogram plus two threshold sliders.
        hist = np.histogram(self.ips.lookup(),list(range(257)))[0]
        if ips.imgtype == '8-bit':
            self.para = {'thr1':0, 'thr2':255}
            self.view = [('hist', 'hist', hist),
                ('slide', 'thr1', (0,255), 0, 'Low'),
                ('slide', 'thr2', (0,255), 0, 'High')]
        else :
            self.para = {'thr1':ips.range[0], 'thr2':ips.range[1]}
            self.view = [('hist', 'hist', hist,),
                ('slide', 'thr1', ips.range, 10, 'Low'),
                ('slide', 'thr2', ips.range, 10, 'High')]
            self.arange = ips.range
        # Keep the original LUT so cancel()/run() can restore it; the image
        # works on a copy while previewing.
        self.lut = ips.lut
        ips.lut = self.lut.copy()
        return True
    def show(self, temp=ThresholdDialog):
        # ThresholdDialog needs the image range as an extra ctor argument.
        dialog = lambda win, title, lim = self.ips.range:temp(win, title, lim)
        return Filter.show(self, dialog)
    def cancel(self, ips):
        # Restore the original LUT when the dialog is cancelled.
        ips.lut = self.lut
        ips.update = 'pix'
    def preview(self, ips, para):
        # Overlay: values below the low threshold show green, values at or
        # above the high threshold show red, via LUT manipulation only.
        ips.lut[:] = self.lut
        thr1 = int((para['thr1']-self.arange[0])*(
            255.0/max(1e-10, self.arange[1]-self.arange[0])))
        thr2 = int((para['thr2']-self.arange[0])*(
            255.0/max(1e-10, self.arange[1]-self.arange[0])))
        ips.lut[:thr1] = [0,255,0]
        ips.lut[thr2:] = [255,0,0]
        ips.update = 'pix'
    #process
    def run(self, ips, snap, img, para = None):
        # Binary result: pixels OUTSIDE [thr1, thr2) become 255, pixels
        # inside become 0 - consistent with the green/red preview marking
        # the out-of-range regions.
        if para == None: para = self.para
        ips.lut = self.lut
        img[:] = 0
        img[snap>=para['thr2']] = 255
        img[snap<para['thr1']] = 255
        ips.range = (0, 255)
75f4e99d8d824439c3806c551a9c6853fbb66678 | 243 | py | Python | python/testData/refactoring/pullup/abstractMethodDocStringPrefixPreserved.after.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/refactoring/pullup/abstractMethodDocStringPrefixPreserved.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/refactoring/pullup/abstractMethodDocStringPrefixPreserved.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | # coding=utf-8
from abc import ABCMeta, abstractmethod
class A:
__metaclass__ = ABCMeta
@abstractmethod
def m(self, x):
u"""Юникод"""
pass
class B(A):
def m(self, x):
u"""Юникод"""
return x
| 13.5 | 39 | 0.55144 |
aad8fde8001a0241b2c07eee5eeae237e740fbcf | 43,342 | py | Python | packages/python/plotly/plotly/graph_objs/parcats/_line.py | eisenlohr/plotly.py | 3b0e3df45036cf48f772b13bcc10ce347964aefc | [
"MIT"
] | 1 | 2021-12-11T07:01:40.000Z | 2021-12-11T07:01:40.000Z | packages/python/plotly/plotly/graph_objs/parcats/_line.py | jiangrongbo/plotly.py | df19fc702b309586cc24e25373b87e8bdbb3ff60 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/graph_objs/parcats/_line.py | jiangrongbo/plotly.py | df19fc702b309586cc24e25373b87e8bdbb3ff60 | [
"MIT"
] | 1 | 2021-11-29T22:55:05.000Z | 2021-11-29T22:55:05.000Z | from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Line(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "parcats"
_path_str = "parcats.line"
_valid_props = {
"autocolorscale",
"cauto",
"cmax",
"cmid",
"cmin",
"color",
"coloraxis",
"colorbar",
"colorscale",
"colorsrc",
"hovertemplate",
"reversescale",
"shape",
"showscale",
}
# autocolorscale
# --------------
@property
def autocolorscale(self):
"""
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`line.colorscale`. Has an effect only if in `line.color`is set
to a numerical array. In case `colorscale` is unspecified or
`autocolorscale` is true, the default palette will be chosen
according to whether numbers in the `color` array are all
positive, all negative or mixed.
The 'autocolorscale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["autocolorscale"]
@autocolorscale.setter
def autocolorscale(self, val):
self["autocolorscale"] = val
# cauto
# -----
@property
def cauto(self):
"""
Determines whether or not the color domain is computed with
respect to the input data (here in `line.color`) or the bounds
set in `line.cmin` and `line.cmax` Has an effect only if in
`line.color`is set to a numerical array. Defaults to `false`
when `line.cmin` and `line.cmax` are set by the user.
The 'cauto' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["cauto"]
@cauto.setter
def cauto(self, val):
self["cauto"] = val
# cmax
# ----
@property
def cmax(self):
"""
Sets the upper bound of the color domain. Has an effect only if
in `line.color`is set to a numerical array. Value should have
the same units as in `line.color` and if set, `line.cmin` must
be set as well.
The 'cmax' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmax"]
@cmax.setter
def cmax(self, val):
self["cmax"] = val
# cmid
# ----
@property
def cmid(self):
"""
Sets the mid-point of the color domain by scaling `line.cmin`
and/or `line.cmax` to be equidistant to this point. Has an
effect only if in `line.color`is set to a numerical array.
Value should have the same units as in `line.color`. Has no
effect when `line.cauto` is `false`.
The 'cmid' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmid"]
@cmid.setter
def cmid(self, val):
self["cmid"] = val
# cmin
# ----
@property
def cmin(self):
"""
Sets the lower bound of the color domain. Has an effect only if
in `line.color`is set to a numerical array. Value should have
the same units as in `line.color` and if set, `line.cmax` must
be set as well.
The 'cmin' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["cmin"]
@cmin.setter
def cmin(self, val):
self["cmin"] = val
# color
# -----
@property
def color(self):
"""
Sets thelinecolor. It accepts either a specific color or an
array of numbers that are mapped to the colorscale relative to
the max and min values of the array or relative to `line.cmin`
and `line.cmax` if set.
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A number that will be interpreted as a color
according to parcats.line.colorscale
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# coloraxis
# ---------
@property
def coloraxis(self):
"""
Sets a reference to a shared color axis. References to these
shared color axes are "coloraxis", "coloraxis2", "coloraxis3",
etc. Settings for these shared color axes are set in the
layout, under `layout.coloraxis`, `layout.coloraxis2`, etc.
Note that multiple color scales can be linked to the same color
axis.
The 'coloraxis' property is an identifier of a particular
subplot, of type 'coloraxis', that may be specified as the string 'coloraxis'
optionally followed by an integer >= 1
(e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.)
Returns
-------
str
"""
return self["coloraxis"]
@coloraxis.setter
def coloraxis(self, val):
self["coloraxis"] = val
# colorbar
# --------
@property
def colorbar(self):
"""
The 'colorbar' property is an instance of ColorBar
that may be specified as:
- An instance of :class:`plotly.graph_objs.parcats.line.ColorBar`
- A dict of string/value properties that will be passed
to the ColorBar constructor
Supported dict properties:
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in *pixels. Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see: h
ttps://github.com/d3/d3-format/tree/v1.4.5#d3-f
ormat. And for dates see:
https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.parcats
.line.colorbar.Tickformatstop` instances or
dicts with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.parcats.line.colorbar.tickformatstopdefaults)
, sets the default property values to use for
elements of
parcats.line.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for `ticktext`.
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.parcats.line.color
bar.Title` instance or dict with compatible
properties
titlefont
Deprecated: Please use
parcats.line.colorbar.title.font instead. Sets
this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
parcats.line.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
plotly.graph_objs.parcats.line.ColorBar
"""
return self["colorbar"]
@colorbar.setter
def colorbar(self, val):
self["colorbar"] = val
# colorscale
# ----------
@property
def colorscale(self):
"""
Sets the colorscale. Has an effect only if in `line.color`is
set to a numerical array. The colorscale must be an array
containing arrays mapping a normalized value to an rgb, rgba,
hex, hsl, hsv, or named color string. At minimum, a mapping for
the lowest (0) and highest (1) values are required. For
example, `[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To
control the bounds of the colorscale in color space,
use`line.cmin` and `line.cmax`. Alternatively, `colorscale` may
be a palette name string of the following list: Blackbody,Bluer
ed,Blues,Cividis,Earth,Electric,Greens,Greys,Hot,Jet,Picnic,Por
tland,Rainbow,RdBu,Reds,Viridis,YlGnBu,YlOrRd.
The 'colorscale' property is a colorscale and may be
specified as:
- A list of colors that will be spaced evenly to create the colorscale.
Many predefined colorscale lists are included in the sequential, diverging,
and cyclical modules in the plotly.colors package.
- A list of 2-element lists where the first element is the
normalized color level value (starting at 0 and ending at 1),
and the second item is a valid color string.
(e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])
- One of the following named colorscales:
['aggrnyl', 'agsunset', 'algae', 'amp', 'armyrose', 'balance',
'blackbody', 'bluered', 'blues', 'blugrn', 'bluyl', 'brbg',
'brwnyl', 'bugn', 'bupu', 'burg', 'burgyl', 'cividis', 'curl',
'darkmint', 'deep', 'delta', 'dense', 'earth', 'edge', 'electric',
'emrld', 'fall', 'geyser', 'gnbu', 'gray', 'greens', 'greys',
'haline', 'hot', 'hsv', 'ice', 'icefire', 'inferno', 'jet',
'magenta', 'magma', 'matter', 'mint', 'mrybm', 'mygbm', 'oranges',
'orrd', 'oryel', 'oxy', 'peach', 'phase', 'picnic', 'pinkyl',
'piyg', 'plasma', 'plotly3', 'portland', 'prgn', 'pubu', 'pubugn',
'puor', 'purd', 'purp', 'purples', 'purpor', 'rainbow', 'rdbu',
'rdgy', 'rdpu', 'rdylbu', 'rdylgn', 'redor', 'reds', 'solar',
'spectral', 'speed', 'sunset', 'sunsetdark', 'teal', 'tealgrn',
'tealrose', 'tempo', 'temps', 'thermal', 'tropic', 'turbid',
'turbo', 'twilight', 'viridis', 'ylgn', 'ylgnbu', 'ylorbr',
'ylorrd'].
Appending '_r' to a named colorscale reverses it.
Returns
-------
str
"""
return self["colorscale"]
@colorscale.setter
def colorscale(self, val):
self["colorscale"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `color`.
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# hovertemplate
# -------------
@property
def hovertemplate(self):
"""
Template string used for rendering the information that appear
on hover box. Note that this will override `hoverinfo`.
Variables are inserted using %{variable}, for example "y: %{y}"
as well as %{xother}, {%_xother}, {%_xother_}, {%xother_}. When
showing info for several points, "xother" will be added to
those with different x positions from the first point. An
underscore before or after "(x|y)other" will add a space on
that side, only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format for
details on the formatting syntax. Dates are formatted using
d3-time-format's syntax %{variable|d3-time-format}, for example
"Day: %{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the date
formatting syntax. The variables available in `hovertemplate`
are the ones emitted as event data described at this link
https://plotly.com/javascript/plotlyjs-events/#event-data.
Additionally, every attributes that can be specified per-point
(the ones that are `arrayOk: true`) are available. variables
`count` and `probability`. Anything contained in tag `<extra>`
is displayed in the secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary box
completely, use an empty tag `<extra></extra>`.
The 'hovertemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["hovertemplate"]
@hovertemplate.setter
def hovertemplate(self, val):
self["hovertemplate"] = val
# reversescale
# ------------
@property
def reversescale(self):
"""
Reverses the color mapping if true. Has an effect only if in
`line.color`is set to a numerical array. If true, `line.cmin`
will correspond to the last color in the array and `line.cmax`
will correspond to the first color.
The 'reversescale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["reversescale"]
@reversescale.setter
def reversescale(self, val):
self["reversescale"] = val
# shape
# -----
@property
def shape(self):
"""
Sets the shape of the paths. If `linear`, paths are composed of
straight lines. If `hspline`, paths are composed of horizontal
curved splines
The 'shape' property is an enumeration that may be specified as:
- One of the following enumeration values:
['linear', 'hspline']
Returns
-------
Any
"""
return self["shape"]
@shape.setter
def shape(self, val):
self["shape"] = val
# showscale
# ---------
@property
def showscale(self):
"""
Determines whether or not a colorbar is displayed for this
trace. Has an effect only if in `line.color`is set to a
numerical array.
The 'showscale' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showscale"]
@showscale.setter
def showscale(self, val):
self["showscale"] = val
# Self properties description
# ---------------------------
    @property
    def _prop_descriptions(self):
        # Plain-text catalogue of every Line property, consumed by the
        # code that assembles the class/constructor docstrings. The
        # literal below is runtime data and must not be reworded.
        return """\
        autocolorscale
            Determines whether the colorscale is a default palette
            (`autocolorscale: true`) or the palette determined by
            `line.colorscale`. Has an effect only if in
            `line.color`is set to a numerical array. In case
            `colorscale` is unspecified or `autocolorscale` is
            true, the default palette will be chosen according to
            whether numbers in the `color` array are all positive,
            all negative or mixed.
        cauto
            Determines whether or not the color domain is computed
            with respect to the input data (here in `line.color`)
            or the bounds set in `line.cmin` and `line.cmax` Has
            an effect only if in `line.color`is set to a numerical
            array. Defaults to `false` when `line.cmin` and
            `line.cmax` are set by the user.
        cmax
            Sets the upper bound of the color domain. Has an effect
            only if in `line.color`is set to a numerical array.
            Value should have the same units as in `line.color` and
            if set, `line.cmin` must be set as well.
        cmid
            Sets the mid-point of the color domain by scaling
            `line.cmin` and/or `line.cmax` to be equidistant to
            this point. Has an effect only if in `line.color`is set
            to a numerical array. Value should have the same units
            as in `line.color`. Has no effect when `line.cauto` is
            `false`.
        cmin
            Sets the lower bound of the color domain. Has an effect
            only if in `line.color`is set to a numerical array.
            Value should have the same units as in `line.color` and
            if set, `line.cmax` must be set as well.
        color
            Sets thelinecolor. It accepts either a specific color
            or an array of numbers that are mapped to the
            colorscale relative to the max and min values of the
            array or relative to `line.cmin` and `line.cmax` if
            set.
        coloraxis
            Sets a reference to a shared color axis. References to
            these shared color axes are "coloraxis", "coloraxis2",
            "coloraxis3", etc. Settings for these shared color axes
            are set in the layout, under `layout.coloraxis`,
            `layout.coloraxis2`, etc. Note that multiple color
            scales can be linked to the same color axis.
        colorbar
            :class:`plotly.graph_objects.parcats.line.ColorBar`
            instance or dict with compatible properties
        colorscale
            Sets the colorscale. Has an effect only if in
            `line.color`is set to a numerical array. The colorscale
            must be an array containing arrays mapping a normalized
            value to an rgb, rgba, hex, hsl, hsv, or named color
            string. At minimum, a mapping for the lowest (0) and
            highest (1) values are required. For example, `[[0,
            'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To control the
            bounds of the colorscale in color space, use`line.cmin`
            and `line.cmax`. Alternatively, `colorscale` may be a
            palette name string of the following list: Blackbody,Bl
            uered,Blues,Cividis,Earth,Electric,Greens,Greys,Hot,Jet
            ,Picnic,Portland,Rainbow,RdBu,Reds,Viridis,YlGnBu,YlOrR
            d.
        colorsrc
            Sets the source reference on Chart Studio Cloud for
            `color`.
        hovertemplate
            Template string used for rendering the information that
            appear on hover box. Note that this will override
            `hoverinfo`. Variables are inserted using %{variable},
            for example "y: %{y}" as well as %{xother}, {%_xother},
            {%_xother_}, {%xother_}. When showing info for several
            points, "xother" will be added to those with different
            x positions from the first point. An underscore before
            or after "(x|y)other" will add a space on that side,
            only when this field is shown. Numbers are formatted
            using d3-format's syntax %{variable:d3-format}, for
            example "Price: %{y:$.2f}".
            https://github.com/d3/d3-format/tree/v1.4.5#d3-format
            for details on the formatting syntax. Dates are
            formatted using d3-time-format's syntax
            %{variable|d3-time-format}, for example "Day:
            %{2019-01-01|%A}". https://github.com/d3/d3-time-
            format/tree/v2.2.3#locale_format for details on the
            date formatting syntax. The variables available in
            `hovertemplate` are the ones emitted as event data
            described at this link
            https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attributes that can be
            specified per-point (the ones that are `arrayOk: true`)
            are available. variables `count` and `probability`.
            Anything contained in tag `<extra>` is displayed in the
            secondary box, for example
            "<extra>{fullData.name}</extra>". To hide the secondary
            box completely, use an empty tag `<extra></extra>`.
        reversescale
            Reverses the color mapping if true. Has an effect only
            if in `line.color`is set to a numerical array. If true,
            `line.cmin` will correspond to the last color in the
            array and `line.cmax` will correspond to the first
            color.
        shape
            Sets the shape of the paths. If `linear`, paths are
            composed of straight lines. If `hspline`, paths are
            composed of horizontal curved splines
        showscale
            Determines whether or not a colorbar is displayed for
            this trace. Has an effect only if in `line.color`is set
            to a numerical array.
        """
def __init__(
self,
arg=None,
autocolorscale=None,
cauto=None,
cmax=None,
cmid=None,
cmin=None,
color=None,
coloraxis=None,
colorbar=None,
colorscale=None,
colorsrc=None,
hovertemplate=None,
reversescale=None,
shape=None,
showscale=None,
**kwargs
):
"""
Construct a new Line object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.parcats.Line`
autocolorscale
Determines whether the colorscale is a default palette
(`autocolorscale: true`) or the palette determined by
`line.colorscale`. Has an effect only if in
`line.color`is set to a numerical array. In case
`colorscale` is unspecified or `autocolorscale` is
true, the default palette will be chosen according to
whether numbers in the `color` array are all positive,
all negative or mixed.
cauto
Determines whether or not the color domain is computed
with respect to the input data (here in `line.color`)
or the bounds set in `line.cmin` and `line.cmax` Has
an effect only if in `line.color`is set to a numerical
array. Defaults to `false` when `line.cmin` and
`line.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has an effect
only if in `line.color`is set to a numerical array.
Value should have the same units as in `line.color` and
if set, `line.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by scaling
`line.cmin` and/or `line.cmax` to be equidistant to
this point. Has an effect only if in `line.color`is set
to a numerical array. Value should have the same units
as in `line.color`. Has no effect when `line.cauto` is
`false`.
cmin
Sets the lower bound of the color domain. Has an effect
only if in `line.color`is set to a numerical array.
Value should have the same units as in `line.color` and
if set, `line.cmax` must be set as well.
color
Sets thelinecolor. It accepts either a specific color
or an array of numbers that are mapped to the
colorscale relative to the max and min values of the
array or relative to `line.cmin` and `line.cmax` if
set.
coloraxis
Sets a reference to a shared color axis. References to
these shared color axes are "coloraxis", "coloraxis2",
"coloraxis3", etc. Settings for these shared color axes
are set in the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple color
scales can be linked to the same color axis.
colorbar
:class:`plotly.graph_objects.parcats.line.ColorBar`
instance or dict with compatible properties
colorscale
Sets the colorscale. Has an effect only if in
`line.color`is set to a numerical array. The colorscale
must be an array containing arrays mapping a normalized
value to an rgb, rgba, hex, hsl, hsv, or named color
string. At minimum, a mapping for the lowest (0) and
highest (1) values are required. For example, `[[0,
'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To control the
bounds of the colorscale in color space, use`line.cmin`
and `line.cmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Blackbody,Bl
uered,Blues,Cividis,Earth,Electric,Greens,Greys,Hot,Jet
,Picnic,Portland,Rainbow,RdBu,Reds,Viridis,YlGnBu,YlOrR
d.
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
hovertemplate
Template string used for rendering the information that
appear on hover box. Note that this will override
`hoverinfo`. Variables are inserted using %{variable},
for example "y: %{y}" as well as %{xother}, {%_xother},
{%_xother_}, {%xother_}. When showing info for several
points, "xother" will be added to those with different
x positions from the first point. An underscore before
or after "(x|y)other" will add a space on that side,
only when this field is shown. Numbers are formatted
using d3-format's syntax %{variable:d3-format}, for
example "Price: %{y:$.2f}".
https://github.com/d3/d3-format/tree/v1.4.5#d3-format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-time-
format/tree/v2.2.3#locale_format for details on the
date formatting syntax. The variables available in
`hovertemplate` are the ones emitted as event data
described at this link
https://plotly.com/javascript/plotlyjs-events/#event-
data. Additionally, every attributes that can be
specified per-point (the ones that are `arrayOk: true`)
are available. variables `count` and `probability`.
Anything contained in tag `<extra>` is displayed in the
secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary
box completely, use an empty tag `<extra></extra>`.
reversescale
Reverses the color mapping if true. Has an effect only
if in `line.color`is set to a numerical array. If true,
`line.cmin` will correspond to the last color in the
array and `line.cmax` will correspond to the first
color.
shape
Sets the shape of the paths. If `linear`, paths are
composed of straight lines. If `hspline`, paths are
composed of horizontal curved splines
showscale
Determines whether or not a colorbar is displayed for
this trace. Has an effect only if in `line.color`is set
to a numerical array.
Returns
-------
Line
"""
super(Line, self).__init__("line")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.parcats.Line
constructor must be a dict or
an instance of :class:`plotly.graph_objs.parcats.Line`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("autocolorscale", None)
_v = autocolorscale if autocolorscale is not None else _v
if _v is not None:
self["autocolorscale"] = _v
_v = arg.pop("cauto", None)
_v = cauto if cauto is not None else _v
if _v is not None:
self["cauto"] = _v
_v = arg.pop("cmax", None)
_v = cmax if cmax is not None else _v
if _v is not None:
self["cmax"] = _v
_v = arg.pop("cmid", None)
_v = cmid if cmid is not None else _v
if _v is not None:
self["cmid"] = _v
_v = arg.pop("cmin", None)
_v = cmin if cmin is not None else _v
if _v is not None:
self["cmin"] = _v
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("coloraxis", None)
_v = coloraxis if coloraxis is not None else _v
if _v is not None:
self["coloraxis"] = _v
_v = arg.pop("colorbar", None)
_v = colorbar if colorbar is not None else _v
if _v is not None:
self["colorbar"] = _v
_v = arg.pop("colorscale", None)
_v = colorscale if colorscale is not None else _v
if _v is not None:
self["colorscale"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("hovertemplate", None)
_v = hovertemplate if hovertemplate is not None else _v
if _v is not None:
self["hovertemplate"] = _v
_v = arg.pop("reversescale", None)
_v = reversescale if reversescale is not None else _v
if _v is not None:
self["reversescale"] = _v
_v = arg.pop("shape", None)
_v = shape if shape is not None else _v
if _v is not None:
self["shape"] = _v
_v = arg.pop("showscale", None)
_v = showscale if showscale is not None else _v
if _v is not None:
self["showscale"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
| 42.326172 | 87 | 0.550736 |
c857f3646988a8e44e345a9f0490fbbb36d1de5d | 397 | py | Python | ree/canary/lanzafuerte.py | blackleg/reescrapper | d41b8ba1b4e6750539a13b3e18eff84c1407ad0c | [
"MIT"
] | null | null | null | ree/canary/lanzafuerte.py | blackleg/reescrapper | d41b8ba1b4e6750539a13b3e18eff84c1407ad0c | [
"MIT"
] | null | null | null | ree/canary/lanzafuerte.py | blackleg/reescrapper | d41b8ba1b4e6750539a13b3e18eff84c1407ad0c | [
"MIT"
] | null | null | null | from ree.core import Scraper
class LanzaroteFuerteventura(Scraper):
    """REE scraper for the combined Lanzarote-Fuerteventura ("LZ_FV")
    electrical system in the Canary Islands.

    Fix: the original methods called ``super(self.__class__, self)``,
    which recurses infinitely as soon as this class is subclassed
    (``self.__class__`` is then the subclass, not this class). The
    class is now referenced explicitly.
    """

    def __init__(self, session=None, verify=True):
        """Create the scraper.

        Parameters
        ----------
        session
            Optional session object shared between scrapers —
            presumably a requests-like HTTP session; verify against
            ``ree.core.Scraper``.
        verify
            Passed through to the base class; presumably TLS
            certificate verification — confirm in ``ree.core``.
        """
        super(LanzaroteFuerteventura, self).__init__(session, verify)

    def get(self, date=None, last=True):
        """Fetch data for the LZ_FV system via the base Scraper.

        Parameters
        ----------
        date
            Optional date to query; ``None`` means the default period
            chosen by the base class.
        last
            When True request only the latest sample, otherwise the
            full series (see :meth:`get_all`).
        """
        return super(LanzaroteFuerteventura, self).get(
            "LZ_FV", "Atlantic/Canary", "Canarias", date, last
        )

    def get_all(self, date=None):
        """Fetch every sample for the given date (``last=False``)."""
        return self.get(date, False)
7490b20715dae7719c9605a9801ca380e8db23dc | 39,840 | py | Python | ucsmsdk/mometa/equipment/EquipmentSystemIOController.py | vesposito/ucsmsdk | 03bc1e19b8582c68d3a94a20a939a48fe91332f8 | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/equipment/EquipmentSystemIOController.py | vesposito/ucsmsdk | 03bc1e19b8582c68d3a94a20a939a48fe91332f8 | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/equipment/EquipmentSystemIOController.py | vesposito/ucsmsdk | 03bc1e19b8582c68d3a94a20a939a48fe91332f8 | [
"Apache-2.0"
] | null | null | null | """This module contains the general information for EquipmentSystemIOController ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class EquipmentSystemIOControllerConsts:
ADMIN_POWER_STATE_CYCLE_IMMEDIATE = "cycle-immediate"
ADMIN_POWER_STATE_CYCLE_WAIT = "cycle-wait"
ADMIN_POWER_STATE_POLICY = "policy"
ADMIN_STATE_ACKNOWLEDGED = "acknowledged"
ADMIN_STATE_AUTO_ACKNOWLEDGE = "auto-acknowledge"
ADMIN_STATE_DECOMMISSION = "decommission"
ADMIN_STATE_DISABLE_PORT_CHANNEL = "disable-port-channel"
ADMIN_STATE_ENABLE_PORT_CHANNEL = "enable-port-channel"
ADMIN_STATE_RE_ACKNOWLEDGE = "re-acknowledge"
ADMIN_STATE_REMOVE = "remove"
CHASSIS_ID_N_A = "N/A"
CHECK_POINT_DEEP_CHECKPOINT = "deep-checkpoint"
CHECK_POINT_DISCOVERED = "discovered"
CHECK_POINT_REMOVING = "removing"
CHECK_POINT_SHALLOW_CHECKPOINT = "shallow-checkpoint"
CHECK_POINT_UNKNOWN = "unknown"
CONFIG_STATE_ACK_IN_PROGRESS = "ack-in-progress"
CONFIG_STATE_ACKNOWLEDGED = "acknowledged"
CONFIG_STATE_AUTO_ACK = "auto-ack"
CONFIG_STATE_EVALUATION = "evaluation"
CONFIG_STATE_OK = "ok"
CONFIG_STATE_REMOVING = "removing"
CONFIG_STATE_UN_ACKNOWLEDGED = "un-acknowledged"
CONFIG_STATE_UN_INITIALIZED = "un-initialized"
CONFIG_STATE_UNSUPPORTED_CONNECTIVITY = "unsupported-connectivity"
DISCOVERY_AUTO_UPGRADING = "auto-upgrading"
DISCOVERY_DISCOVERED = "discovered"
DISCOVERY_OFFLINE = "offline"
DISCOVERY_ONLINE = "online"
DISCOVERY_PINGLOST = "pinglost"
DISCOVERY_UNKNOWN = "unknown"
DISCOVERY_UNSECURE = "unsecure"
DISCOVERY_UNSUPPORTED_CONNECTIVITY = "unsupported-connectivity"
FSM_PREV_RESET_CMC_BEGIN = "ResetCmcBegin"
FSM_PREV_RESET_CMC_EXECUTE = "ResetCmcExecute"
FSM_PREV_RESET_CMC_FAIL = "ResetCmcFail"
FSM_PREV_RESET_CMC_SUCCESS = "ResetCmcSuccess"
FSM_PREV_NOP = "nop"
FSM_RMT_INV_ERR_CODE_ERR_2FA_AUTH_RETRY = "ERR-2fa-auth-retry"
FSM_RMT_INV_ERR_CODE_ERR_ACTIVATE_FAILED = "ERR-ACTIVATE-failed"
FSM_RMT_INV_ERR_CODE_ERR_ACTIVATE_IN_PROGRESS = "ERR-ACTIVATE-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_ACTIVATE_RETRY = "ERR-ACTIVATE-retry"
FSM_RMT_INV_ERR_CODE_ERR_BIOS_TOKENS_OLD_BIOS = "ERR-BIOS-TOKENS-OLD-BIOS"
FSM_RMT_INV_ERR_CODE_ERR_BIOS_TOKENS_OLD_CIMC = "ERR-BIOS-TOKENS-OLD-CIMC"
FSM_RMT_INV_ERR_CODE_ERR_BIOS_NETWORK_BOOT_ORDER_NOT_FOUND = "ERR-BIOS-network-boot-order-not-found"
FSM_RMT_INV_ERR_CODE_ERR_BOARDCTRLUPDATE_IGNORE = "ERR-BOARDCTRLUPDATE-ignore"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_CANCELLED = "ERR-DIAG-cancelled"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_FSM_RESTARTED = "ERR-DIAG-fsm-restarted"
FSM_RMT_INV_ERR_CODE_ERR_DIAG_TEST_FAILED = "ERR-DIAG-test-failed"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_AUTHENTICATION_FAILURE = "ERR-DNLD-authentication-failure"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_HOSTKEY_MISMATCH = "ERR-DNLD-hostkey-mismatch"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_INVALID_IMAGE = "ERR-DNLD-invalid-image"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_NO_FILE = "ERR-DNLD-no-file"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_NO_SPACE = "ERR-DNLD-no-space"
FSM_RMT_INV_ERR_CODE_ERR_DNLD_USB_UNMOUNTED = "ERR-DNLD-usb-unmounted"
FSM_RMT_INV_ERR_CODE_ERR_DNS_DELETE_ERROR = "ERR-DNS-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_DNS_GET_ERROR = "ERR-DNS-get-error"
FSM_RMT_INV_ERR_CODE_ERR_DNS_SET_ERROR = "ERR-DNS-set-error"
FSM_RMT_INV_ERR_CODE_ERR_DIAGNOSTICS_IN_PROGRESS = "ERR-Diagnostics-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_DIAGNOSTICS_MEMTEST_IN_PROGRESS = "ERR-Diagnostics-memtest-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_DIAGNOSTICS_NETWORK_IN_PROGRESS = "ERR-Diagnostics-network-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_FILTER_ILLEGAL_FORMAT = "ERR-FILTER-illegal-format"
FSM_RMT_INV_ERR_CODE_ERR_FSM_NO_SUCH_STATE = "ERR-FSM-no-such-state"
FSM_RMT_INV_ERR_CODE_ERR_HOST_FRU_IDENTITY_MISMATCH = "ERR-HOST-fru-identity-mismatch"
FSM_RMT_INV_ERR_CODE_ERR_HTTP_SET_ERROR = "ERR-HTTP-set-error"
FSM_RMT_INV_ERR_CODE_ERR_HTTPS_SET_ERROR = "ERR-HTTPS-set-error"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_ANALYZE_RESULTS = "ERR-IBMC-analyze-results"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_CONNECT_ERROR = "ERR-IBMC-connect-error"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_CONNECTOR_INFO_RETRIEVAL_ERROR = "ERR-IBMC-connector-info-retrieval-error"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_FRU_RETRIEVAL_ERROR = "ERR-IBMC-fru-retrieval-error"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_INVALID_END_POINT_CONFIG = "ERR-IBMC-invalid-end-point-config"
FSM_RMT_INV_ERR_CODE_ERR_IBMC_RESULTS_NOT_READY = "ERR-IBMC-results-not-ready"
FSM_RMT_INV_ERR_CODE_ERR_MAX_SUBSCRIPTIONS_ALLOWED_ERROR = "ERR-MAX-subscriptions-allowed-error"
FSM_RMT_INV_ERR_CODE_ERR_MO_CONFIG_CHILD_OBJECT_CANT_BE_CONFIGURED = "ERR-MO-CONFIG-child-object-cant-be-configured"
FSM_RMT_INV_ERR_CODE_ERR_MO_META_NO_SUCH_OBJECT_CLASS = "ERR-MO-META-no-such-object-class"
FSM_RMT_INV_ERR_CODE_ERR_MO_PROPERTY_NO_SUCH_PROPERTY = "ERR-MO-PROPERTY-no-such-property"
FSM_RMT_INV_ERR_CODE_ERR_MO_PROPERTY_VALUE_OUT_OF_RANGE = "ERR-MO-PROPERTY-value-out-of-range"
FSM_RMT_INV_ERR_CODE_ERR_MO_ACCESS_DENIED = "ERR-MO-access-denied"
FSM_RMT_INV_ERR_CODE_ERR_MO_DELETION_RULE_VIOLATION = "ERR-MO-deletion-rule-violation"
FSM_RMT_INV_ERR_CODE_ERR_MO_DUPLICATE_OBJECT = "ERR-MO-duplicate-object"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_CONTAINMENT = "ERR-MO-illegal-containment"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_CREATION = "ERR-MO-illegal-creation"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_ITERATOR_STATE = "ERR-MO-illegal-iterator-state"
FSM_RMT_INV_ERR_CODE_ERR_MO_ILLEGAL_OBJECT_LIFECYCLE_TRANSITION = "ERR-MO-illegal-object-lifecycle-transition"
FSM_RMT_INV_ERR_CODE_ERR_MO_NAMING_RULE_VIOLATION = "ERR-MO-naming-rule-violation"
FSM_RMT_INV_ERR_CODE_ERR_MO_OBJECT_NOT_FOUND = "ERR-MO-object-not-found"
FSM_RMT_INV_ERR_CODE_ERR_MO_RESOURCE_ALLOCATION = "ERR-MO-resource-allocation"
FSM_RMT_INV_ERR_CODE_ERR_NTP_DELETE_ERROR = "ERR-NTP-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_NTP_GET_ERROR = "ERR-NTP-get-error"
FSM_RMT_INV_ERR_CODE_ERR_NTP_SET_ERROR = "ERR-NTP-set-error"
FSM_RMT_INV_ERR_CODE_ERR_POWER_CAP_UNSUPPORTED = "ERR-POWER-CAP-UNSUPPORTED"
FSM_RMT_INV_ERR_CODE_ERR_POWER_PROFILE_IN_PROGRESS = "ERR-POWER-PROFILE-IN-PROGRESS"
FSM_RMT_INV_ERR_CODE_ERR_SERVER_MIS_CONNECT = "ERR-SERVER-mis-connect"
FSM_RMT_INV_ERR_CODE_ERR_SWITCH_INVALID_IF_CONFIG = "ERR-SWITCH-invalid-if-config"
FSM_RMT_INV_ERR_CODE_ERR_TOKEN_REQUEST_DENIED = "ERR-TOKEN-request-denied"
FSM_RMT_INV_ERR_CODE_ERR_UNABLE_TO_FETCH_BIOS_SETTINGS = "ERR-UNABLE-TO-FETCH-BIOS-SETTINGS"
FSM_RMT_INV_ERR_CODE_ERR_UPDATE_FAILED = "ERR-UPDATE-failed"
FSM_RMT_INV_ERR_CODE_ERR_UPDATE_IN_PROGRESS = "ERR-UPDATE-in-progress"
FSM_RMT_INV_ERR_CODE_ERR_UPDATE_RETRY = "ERR-UPDATE-retry"
FSM_RMT_INV_ERR_CODE_ERR_AAA_CONFIG_MODIFY_ERROR = "ERR-aaa-config-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_ACCT_REALM_SET_ERROR = "ERR-acct-realm-set-error"
FSM_RMT_INV_ERR_CODE_ERR_ADMIN_PASSWD_SET = "ERR-admin-passwd-set"
FSM_RMT_INV_ERR_CODE_ERR_AUTH_ISSUE = "ERR-auth-issue"
FSM_RMT_INV_ERR_CODE_ERR_AUTH_REALM_GET_ERROR = "ERR-auth-realm-get-error"
FSM_RMT_INV_ERR_CODE_ERR_AUTH_REALM_SET_ERROR = "ERR-auth-realm-set-error"
FSM_RMT_INV_ERR_CODE_ERR_AUTHENTICATION = "ERR-authentication"
FSM_RMT_INV_ERR_CODE_ERR_AUTHORIZATION_REQUIRED = "ERR-authorization-required"
FSM_RMT_INV_ERR_CODE_ERR_CLI_SESSION_LIMIT_REACHED = "ERR-cli-session-limit-reached"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_KEYRING = "ERR-create-keyring"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_LOCALE = "ERR-create-locale"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_ROLE = "ERR-create-role"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_TP = "ERR-create-tp"
FSM_RMT_INV_ERR_CODE_ERR_CREATE_USER = "ERR-create-user"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_LOCALE = "ERR-delete-locale"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_ROLE = "ERR-delete-role"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_SESSION = "ERR-delete-session"
FSM_RMT_INV_ERR_CODE_ERR_DELETE_USER = "ERR-delete-user"
FSM_RMT_INV_ERR_CODE_ERR_DOWNGRADE_FAIL = "ERR-downgrade-fail"
FSM_RMT_INV_ERR_CODE_ERR_EFI_DIAGNOSTICS_IN_PROGRESS = "ERR-efi-Diagnostics--in-progress"
FSM_RMT_INV_ERR_CODE_ERR_ENABLE_MGMT_CONN = "ERR-enable-mgmt-conn"
FSM_RMT_INV_ERR_CODE_ERR_EP_SET_ERROR = "ERR-ep-set-error"
FSM_RMT_INV_ERR_CODE_ERR_GET_MAX_HTTP_USER_SESSIONS = "ERR-get-max-http-user-sessions"
FSM_RMT_INV_ERR_CODE_ERR_HTTP_INITIALIZING = "ERR-http-initializing"
FSM_RMT_INV_ERR_CODE_ERR_INSUFFICIENTLY_EQUIPPED = "ERR-insufficiently-equipped"
FSM_RMT_INV_ERR_CODE_ERR_INTERNAL_ERROR = "ERR-internal-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_DELETE_ERROR = "ERR-ldap-delete-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GET_ERROR = "ERR-ldap-get-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GROUP_MODIFY_ERROR = "ERR-ldap-group-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_GROUP_SET_ERROR = "ERR-ldap-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_LDAP_SET_ERROR = "ERR-ldap-set-error"
FSM_RMT_INV_ERR_CODE_ERR_LOCALE_SET_ERROR = "ERR-locale-set-error"
FSM_RMT_INV_ERR_CODE_ERR_MAX_USERID_SESSIONS_REACHED = "ERR-max-userid-sessions-reached"
FSM_RMT_INV_ERR_CODE_ERR_MISSING_METHOD = "ERR-missing-method"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_LOCALE = "ERR-modify-locale"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_ROLE = "ERR-modify-role"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER = "ERR-modify-user"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER_LOCALE = "ERR-modify-user-locale"
FSM_RMT_INV_ERR_CODE_ERR_MODIFY_USER_ROLE = "ERR-modify-user-role"
FSM_RMT_INV_ERR_CODE_ERR_PROVIDER_GROUP_MODIFY_ERROR = "ERR-provider-group-modify-error"
FSM_RMT_INV_ERR_CODE_ERR_PROVIDER_GROUP_SET_ERROR = "ERR-provider-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_GET_ERROR = "ERR-radius-get-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_GLOBAL_SET_ERROR = "ERR-radius-global-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_GROUP_SET_ERROR = "ERR-radius-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_RADIUS_SET_ERROR = "ERR-radius-set-error"
FSM_RMT_INV_ERR_CODE_ERR_REQUEST_TIMEOUT = "ERR-request-timeout"
FSM_RMT_INV_ERR_CODE_ERR_RESET_ADAPTER = "ERR-reset-adapter"
FSM_RMT_INV_ERR_CODE_ERR_ROLE_SET_ERROR = "ERR-role-set-error"
FSM_RMT_INV_ERR_CODE_ERR_SECONDARY_NODE = "ERR-secondary-node"
FSM_RMT_INV_ERR_CODE_ERR_SERVICE_NOT_READY = "ERR-service-not-ready"
FSM_RMT_INV_ERR_CODE_ERR_SESSION_CACHE_FULL = "ERR-session-cache-full"
FSM_RMT_INV_ERR_CODE_ERR_SESSION_NOT_FOUND = "ERR-session-not-found"
FSM_RMT_INV_ERR_CODE_ERR_SET_KEY_CERT = "ERR-set-key-cert"
FSM_RMT_INV_ERR_CODE_ERR_SET_LOGIN_PROFILE = "ERR-set-login-profile"
FSM_RMT_INV_ERR_CODE_ERR_SET_MIN_PASSPHRASE_LENGTH = "ERR-set-min-passphrase-length"
FSM_RMT_INV_ERR_CODE_ERR_SET_NETWORK = "ERR-set-network"
FSM_RMT_INV_ERR_CODE_ERR_SET_PASSWORD_STRENGTH_CHECK = "ERR-set-password-strength-check"
FSM_RMT_INV_ERR_CODE_ERR_SET_PORT_CHANNEL = "ERR-set-port-channel"
FSM_RMT_INV_ERR_CODE_ERR_STORE_PRE_LOGIN_BANNER_MSG = "ERR-store-pre-login-banner-msg"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_ENABLE_ERROR = "ERR-tacacs-enable-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_GLOBAL_SET_ERROR = "ERR-tacacs-global-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_GROUP_SET_ERROR = "ERR-tacacs-group-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_PLUS_GET_ERROR = "ERR-tacacs-plus-get-error"
FSM_RMT_INV_ERR_CODE_ERR_TACACS_SET_ERROR = "ERR-tacacs-set-error"
FSM_RMT_INV_ERR_CODE_ERR_TEST_ERROR_1 = "ERR-test-error-1"
FSM_RMT_INV_ERR_CODE_ERR_TEST_ERROR_2 = "ERR-test-error-2"
FSM_RMT_INV_ERR_CODE_ERR_TIMEZONE_SET_ERROR = "ERR-timezone-set-error"
FSM_RMT_INV_ERR_CODE_ERR_USER_ACCOUNT_EXPIRED = "ERR-user-account-expired"
FSM_RMT_INV_ERR_CODE_ERR_USER_SET_ERROR = "ERR-user-set-error"
FSM_RMT_INV_ERR_CODE_ERR_XML_PARSE_ERROR = "ERR-xml-parse-error"
FSM_RMT_INV_ERR_CODE_NONE = "none"
FSM_STAMP_NEVER = "never"
FSM_STATUS_RESET_CMC_BEGIN = "ResetCmcBegin"
FSM_STATUS_RESET_CMC_EXECUTE = "ResetCmcExecute"
FSM_STATUS_RESET_CMC_FAIL = "ResetCmcFail"
FSM_STATUS_RESET_CMC_SUCCESS = "ResetCmcSuccess"
FSM_STATUS_NOP = "nop"
MANAGING_INSTANCE_A = "A"
MANAGING_INSTANCE_B = "B"
MANAGING_INSTANCE_NONE = "NONE"
MFG_TIME_NOT_APPLICABLE = "not-applicable"
OPER_STATE_ACCESSIBILITY_PROBLEM = "accessibility-problem"
OPER_STATE_AUTO_UPGRADE = "auto-upgrade"
OPER_STATE_BACKPLANE_PORT_PROBLEM = "backplane-port-problem"
OPER_STATE_BIOS_POST_TIMEOUT = "bios-post-timeout"
OPER_STATE_CHASSIS_INTRUSION = "chassis-intrusion"
OPER_STATE_CHASSIS_LIMIT_EXCEEDED = "chassis-limit-exceeded"
OPER_STATE_CONFIG = "config"
OPER_STATE_DECOMISSIONING = "decomissioning"
OPER_STATE_DEGRADED = "degraded"
OPER_STATE_DISABLED = "disabled"
OPER_STATE_DISCOVERY = "discovery"
OPER_STATE_DISCOVERY_FAILED = "discovery-failed"
OPER_STATE_EQUIPMENT_PROBLEM = "equipment-problem"
OPER_STATE_FABRIC_CONN_PROBLEM = "fabric-conn-problem"
OPER_STATE_FABRIC_UNSUPPORTED_CONN = "fabric-unsupported-conn"
OPER_STATE_IDENTIFY = "identify"
OPER_STATE_IDENTITY_UNESTABLISHABLE = "identity-unestablishable"
OPER_STATE_INOPERABLE = "inoperable"
OPER_STATE_LINK_ACTIVATE_BLOCKED = "link-activate-blocked"
OPER_STATE_MALFORMED_FRU = "malformed-fru"
OPER_STATE_NON_OPTIMAL = "non-optimal"
OPER_STATE_NON_OPTIMAL_SEVERE = "non-optimal-severe"
OPER_STATE_NOT_SUPPORTED = "not-supported"
OPER_STATE_OPERABLE = "operable"
OPER_STATE_PEER_COMM_PROBLEM = "peer-comm-problem"
OPER_STATE_PERFORMANCE_PROBLEM = "performance-problem"
OPER_STATE_POST_FAILURE = "post-failure"
OPER_STATE_POWER_PROBLEM = "power-problem"
OPER_STATE_POWERED_OFF = "powered-off"
OPER_STATE_REMOVED = "removed"
OPER_STATE_THERMAL_PROBLEM = "thermal-problem"
OPER_STATE_UNKNOWN = "unknown"
OPER_STATE_UNSUPPORTED_CONFIG = "unsupported-config"
OPER_STATE_UPGRADE_PROBLEM = "upgrade-problem"
OPER_STATE_VOLTAGE_PROBLEM = "voltage-problem"
OPERABILITY_ACCESSIBILITY_PROBLEM = "accessibility-problem"
OPERABILITY_AUTO_UPGRADE = "auto-upgrade"
OPERABILITY_BACKPLANE_PORT_PROBLEM = "backplane-port-problem"
OPERABILITY_BIOS_POST_TIMEOUT = "bios-post-timeout"
OPERABILITY_CHASSIS_INTRUSION = "chassis-intrusion"
OPERABILITY_CHASSIS_LIMIT_EXCEEDED = "chassis-limit-exceeded"
OPERABILITY_CONFIG = "config"
OPERABILITY_DECOMISSIONING = "decomissioning"
OPERABILITY_DEGRADED = "degraded"
OPERABILITY_DISABLED = "disabled"
OPERABILITY_DISCOVERY = "discovery"
OPERABILITY_DISCOVERY_FAILED = "discovery-failed"
OPERABILITY_EQUIPMENT_PROBLEM = "equipment-problem"
OPERABILITY_FABRIC_CONN_PROBLEM = "fabric-conn-problem"
OPERABILITY_FABRIC_UNSUPPORTED_CONN = "fabric-unsupported-conn"
OPERABILITY_IDENTIFY = "identify"
OPERABILITY_IDENTITY_UNESTABLISHABLE = "identity-unestablishable"
OPERABILITY_INOPERABLE = "inoperable"
OPERABILITY_LINK_ACTIVATE_BLOCKED = "link-activate-blocked"
OPERABILITY_MALFORMED_FRU = "malformed-fru"
OPERABILITY_NON_OPTIMAL = "non-optimal"
OPERABILITY_NON_OPTIMAL_SEVERE = "non-optimal-severe"
OPERABILITY_NOT_SUPPORTED = "not-supported"
OPERABILITY_OPERABLE = "operable"
OPERABILITY_PEER_COMM_PROBLEM = "peer-comm-problem"
OPERABILITY_PERFORMANCE_PROBLEM = "performance-problem"
OPERABILITY_POST_FAILURE = "post-failure"
OPERABILITY_POWER_PROBLEM = "power-problem"
OPERABILITY_POWERED_OFF = "powered-off"
OPERABILITY_REMOVED = "removed"
OPERABILITY_THERMAL_PROBLEM = "thermal-problem"
OPERABILITY_UNKNOWN = "unknown"
OPERABILITY_UNSUPPORTED_CONFIG = "unsupported-config"
OPERABILITY_UPGRADE_PROBLEM = "upgrade-problem"
OPERABILITY_VOLTAGE_PROBLEM = "voltage-problem"
PERF_LOWER_CRITICAL = "lower-critical"
PERF_LOWER_NON_CRITICAL = "lower-non-critical"
PERF_LOWER_NON_RECOVERABLE = "lower-non-recoverable"
PERF_NOT_SUPPORTED = "not-supported"
PERF_OK = "ok"
PERF_UNKNOWN = "unknown"
PERF_UPPER_CRITICAL = "upper-critical"
PERF_UPPER_NON_CRITICAL = "upper-non-critical"
PERF_UPPER_NON_RECOVERABLE = "upper-non-recoverable"
POWER_DEGRADED = "degraded"
POWER_ERROR = "error"
POWER_FAILED = "failed"
POWER_NOT_SUPPORTED = "not-supported"
POWER_OFF = "off"
POWER_OFFDUTY = "offduty"
POWER_OFFLINE = "offline"
POWER_OK = "ok"
POWER_ON = "on"
POWER_ONLINE = "online"
POWER_POWER_SAVE = "power-save"
POWER_TEST = "test"
POWER_UNKNOWN = "unknown"
PRESENCE_EMPTY = "empty"
PRESENCE_EQUIPPED = "equipped"
PRESENCE_EQUIPPED_DEPRECATED = "equipped-deprecated"
PRESENCE_EQUIPPED_DISC_ERROR = "equipped-disc-error"
PRESENCE_EQUIPPED_DISC_IN_PROGRESS = "equipped-disc-in-progress"
PRESENCE_EQUIPPED_DISC_NOT_STARTED = "equipped-disc-not-started"
PRESENCE_EQUIPPED_DISC_UNKNOWN = "equipped-disc-unknown"
PRESENCE_EQUIPPED_IDENTITY_UNESTABLISHABLE = "equipped-identity-unestablishable"
PRESENCE_EQUIPPED_NOT_PRIMARY = "equipped-not-primary"
PRESENCE_EQUIPPED_SLAVE = "equipped-slave"
PRESENCE_EQUIPPED_UNSUPPORTED = "equipped-unsupported"
PRESENCE_EQUIPPED_WITH_MALFORMED_FRU = "equipped-with-malformed-fru"
PRESENCE_INACCESSIBLE = "inaccessible"
PRESENCE_MISMATCH = "mismatch"
PRESENCE_MISMATCH_IDENTITY_UNESTABLISHABLE = "mismatch-identity-unestablishable"
PRESENCE_MISMATCH_SLAVE = "mismatch-slave"
PRESENCE_MISSING = "missing"
PRESENCE_MISSING_SLAVE = "missing-slave"
PRESENCE_NOT_SUPPORTED = "not-supported"
PRESENCE_UNAUTHORIZED = "unauthorized"
PRESENCE_UNKNOWN = "unknown"
RE_INSERT_FALSE = "false"
RE_INSERT_NO = "no"
RE_INSERT_TRUE = "true"
RE_INSERT_YES = "yes"
ROLE_ACTIVE = "active"
ROLE_STANDBY = "standby"
ROLE_UNKNOWN = "unknown"
THERMAL_LOWER_CRITICAL = "lower-critical"
THERMAL_LOWER_NON_CRITICAL = "lower-non-critical"
THERMAL_LOWER_NON_RECOVERABLE = "lower-non-recoverable"
THERMAL_NOT_SUPPORTED = "not-supported"
THERMAL_OK = "ok"
THERMAL_UNKNOWN = "unknown"
THERMAL_UPPER_CRITICAL = "upper-critical"
THERMAL_UPPER_NON_CRITICAL = "upper-non-critical"
THERMAL_UPPER_NON_RECOVERABLE = "upper-non-recoverable"
VOLTAGE_LOWER_CRITICAL = "lower-critical"
VOLTAGE_LOWER_NON_CRITICAL = "lower-non-critical"
VOLTAGE_LOWER_NON_RECOVERABLE = "lower-non-recoverable"
VOLTAGE_NOT_SUPPORTED = "not-supported"
VOLTAGE_OK = "ok"
VOLTAGE_UNKNOWN = "unknown"
VOLTAGE_UPPER_CRITICAL = "upper-critical"
VOLTAGE_UPPER_NON_CRITICAL = "upper-non-critical"
VOLTAGE_UPPER_NON_RECOVERABLE = "upper-non-recoverable"
class EquipmentSystemIOController(ManagedObject):
    """This is EquipmentSystemIOController class."""

    # Container of the enumerated constant values (FSM codes, oper states,
    # presence values, ...) defined for this managed object class.
    consts = EquipmentSystemIOControllerConsts()

    # Properties that participate in building this object's relative name
    # ("slot-[id]").
    naming_props = set(['id'])

    # Managed-object metadata: XML class name, RN pattern, first supported
    # API version, access mask, required privileges, allowed parent and
    # child classes, and the verbs supported on this class.
    mo_meta = MoMeta("EquipmentSystemIOController", "equipmentSystemIOController", "slot-[id]", VersionMeta.Version312b, "InputOutput", 0x1ff, [], ["admin", "ls-network", "ls-network-policy", "pn-equipment", "pn-maintenance", "pn-policy"], ['equipmentChassis'], ['computeBoardController', 'equipmentSharedIOModule', 'equipmentSiocTempStats', 'equipmentSystemIOControllerFsm', 'equipmentSystemIOControllerFsmTask', 'eventInst', 'faultInst', 'mgmtController'], ["Get", "Set"])

    # Per-property metadata keyed by the Python attribute name: XML name,
    # type, version added, access mode, mask bit, min/max length, regex
    # constraint, the set of legal enum values and allowed numeric ranges.
    prop_meta = {
        "admin_power_state": MoPropertyMeta("admin_power_state", "adminPowerState", "string", VersionMeta.Version312b, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["cycle-immediate", "cycle-wait", "policy"], []),
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version312b, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, ["acknowledged", "auto-acknowledge", "decommission", "disable-port-channel", "enable-port-channel", "re-acknowledge", "remove"], []),
        "asset_tag": MoPropertyMeta("asset_tag", "assetTag", "string", VersionMeta.Version321d, MoPropertyMeta.READ_ONLY, None, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,32}""", [], []),
        "chassis_id": MoPropertyMeta("chassis_id", "chassisId", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["N/A"], ["0-255"]),
        "check_point": MoPropertyMeta("check_point", "checkPoint", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["deep-checkpoint", "discovered", "removing", "shallow-checkpoint", "unknown"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, 0x8, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "config_state": MoPropertyMeta("config_state", "configState", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["ack-in-progress", "acknowledged", "auto-ack", "evaluation", "ok", "removing", "un-acknowledged", "un-initialized", "unsupported-connectivity"], []),
        "conn_path": MoPropertyMeta("conn_path", "connPath", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|unknown|A|B),){0,3}(defaultValue|unknown|A|B){0,1}""", [], []),
        "conn_status": MoPropertyMeta("conn_status", "connStatus", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|unknown|A|B),){0,3}(defaultValue|unknown|A|B){0,1}""", [], []),
        "discovery": MoPropertyMeta("discovery", "discovery", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["auto-upgrading", "discovered", "offline", "online", "pinglost", "unknown", "unsecure", "unsupported-connectivity"], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
        "flt_aggr": MoPropertyMeta("flt_aggr", "fltAggr", "ulong", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        "fsm_descr": MoPropertyMeta("fsm_descr", "fsmDescr", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        "fsm_prev": MoPropertyMeta("fsm_prev", "fsmPrev", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, ["ResetCmcBegin", "ResetCmcExecute", "ResetCmcFail", "ResetCmcSuccess", "nop"], []),
        "fsm_progr": MoPropertyMeta("fsm_progr", "fsmProgr", "byte", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, [], ["0-100"]),
        "fsm_rmt_inv_err_code": MoPropertyMeta("fsm_rmt_inv_err_code", "fsmRmtInvErrCode", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, ["ERR-2fa-auth-retry", "ERR-ACTIVATE-failed", "ERR-ACTIVATE-in-progress", "ERR-ACTIVATE-retry", "ERR-BIOS-TOKENS-OLD-BIOS", "ERR-BIOS-TOKENS-OLD-CIMC", "ERR-BIOS-network-boot-order-not-found", "ERR-BOARDCTRLUPDATE-ignore", "ERR-DIAG-cancelled", "ERR-DIAG-fsm-restarted", "ERR-DIAG-test-failed", "ERR-DNLD-authentication-failure", "ERR-DNLD-hostkey-mismatch", "ERR-DNLD-invalid-image", "ERR-DNLD-no-file", "ERR-DNLD-no-space", "ERR-DNLD-usb-unmounted", "ERR-DNS-delete-error", "ERR-DNS-get-error", "ERR-DNS-set-error", "ERR-Diagnostics-in-progress", "ERR-Diagnostics-memtest-in-progress", "ERR-Diagnostics-network-in-progress", "ERR-FILTER-illegal-format", "ERR-FSM-no-such-state", "ERR-HOST-fru-identity-mismatch", "ERR-HTTP-set-error", "ERR-HTTPS-set-error", "ERR-IBMC-analyze-results", "ERR-IBMC-connect-error", "ERR-IBMC-connector-info-retrieval-error", "ERR-IBMC-fru-retrieval-error", "ERR-IBMC-invalid-end-point-config", "ERR-IBMC-results-not-ready", "ERR-MAX-subscriptions-allowed-error", "ERR-MO-CONFIG-child-object-cant-be-configured", "ERR-MO-META-no-such-object-class", "ERR-MO-PROPERTY-no-such-property", "ERR-MO-PROPERTY-value-out-of-range", "ERR-MO-access-denied", "ERR-MO-deletion-rule-violation", "ERR-MO-duplicate-object", "ERR-MO-illegal-containment", "ERR-MO-illegal-creation", "ERR-MO-illegal-iterator-state", "ERR-MO-illegal-object-lifecycle-transition", "ERR-MO-naming-rule-violation", "ERR-MO-object-not-found", "ERR-MO-resource-allocation", "ERR-NTP-delete-error", "ERR-NTP-get-error", "ERR-NTP-set-error", "ERR-POWER-CAP-UNSUPPORTED", "ERR-POWER-PROFILE-IN-PROGRESS", "ERR-SERVER-mis-connect", "ERR-SWITCH-invalid-if-config", "ERR-TOKEN-request-denied", "ERR-UNABLE-TO-FETCH-BIOS-SETTINGS", "ERR-UPDATE-failed", "ERR-UPDATE-in-progress", "ERR-UPDATE-retry", "ERR-aaa-config-modify-error", 
        "ERR-acct-realm-set-error", "ERR-admin-passwd-set", "ERR-auth-issue", "ERR-auth-realm-get-error", "ERR-auth-realm-set-error", "ERR-authentication", "ERR-authorization-required", "ERR-cli-session-limit-reached", "ERR-create-keyring", "ERR-create-locale", "ERR-create-role", "ERR-create-tp", "ERR-create-user", "ERR-delete-locale", "ERR-delete-role", "ERR-delete-session", "ERR-delete-user", "ERR-downgrade-fail", "ERR-efi-Diagnostics--in-progress", "ERR-enable-mgmt-conn", "ERR-ep-set-error", "ERR-get-max-http-user-sessions", "ERR-http-initializing", "ERR-insufficiently-equipped", "ERR-internal-error", "ERR-ldap-delete-error", "ERR-ldap-get-error", "ERR-ldap-group-modify-error", "ERR-ldap-group-set-error", "ERR-ldap-set-error", "ERR-locale-set-error", "ERR-max-userid-sessions-reached", "ERR-missing-method", "ERR-modify-locale", "ERR-modify-role", "ERR-modify-user", "ERR-modify-user-locale", "ERR-modify-user-role", "ERR-provider-group-modify-error", "ERR-provider-group-set-error", "ERR-radius-get-error", "ERR-radius-global-set-error", "ERR-radius-group-set-error", "ERR-radius-set-error", "ERR-request-timeout", "ERR-reset-adapter", "ERR-role-set-error", "ERR-secondary-node", "ERR-service-not-ready", "ERR-session-cache-full", "ERR-session-not-found", "ERR-set-key-cert", "ERR-set-login-profile", "ERR-set-min-passphrase-length", "ERR-set-network", "ERR-set-password-strength-check", "ERR-set-port-channel", "ERR-store-pre-login-banner-msg", "ERR-tacacs-enable-error", "ERR-tacacs-global-set-error", "ERR-tacacs-group-set-error", "ERR-tacacs-plus-get-error", "ERR-tacacs-set-error", "ERR-test-error-1", "ERR-test-error-2", "ERR-timezone-set-error", "ERR-user-account-expired", "ERR-user-set-error", "ERR-xml-parse-error", "none"], ["0-4294967295"]),
        "fsm_rmt_inv_err_descr": MoPropertyMeta("fsm_rmt_inv_err_descr", "fsmRmtInvErrDescr", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, 0, 510, None, [], []),
        "fsm_rmt_inv_rslt": MoPropertyMeta("fsm_rmt_inv_rslt", "fsmRmtInvRslt", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, r"""((defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout),){0,32}(defaultValue|not-applicable|resource-unavailable|service-unavailable|intermittent-error|sw-defect|service-not-implemented-ignore|extend-timeout|capability-not-implemented-failure|illegal-fru|end-point-unavailable|failure|resource-capacity-exceeded|service-protocol-error|fw-defect|service-not-implemented-fail|task-reset|unidentified-fail|capability-not-supported|end-point-failed|fru-state-indeterminate|resource-dependency|fru-identity-indeterminate|internal-error|hw-defect|service-not-supported|fru-not-supported|end-point-protocol-error|capability-unavailable|fru-not-ready|capability-not-implemented-ignore|fru-info-malformed|timeout){0,1}""", [], []),
        "fsm_stage_descr": MoPropertyMeta("fsm_stage_descr", "fsmStageDescr", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        "fsm_stamp": MoPropertyMeta("fsm_stamp", "fsmStamp", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", ["never"], []),
        "fsm_status": MoPropertyMeta("fsm_status", "fsmStatus", "string", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, ["ResetCmcBegin", "ResetCmcExecute", "ResetCmcFail", "ResetCmcSuccess", "nop"], []),
        "fsm_try": MoPropertyMeta("fsm_try", "fsmTry", "byte", VersionMeta.Version312b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        "id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version312b, MoPropertyMeta.NAMING, 0x20, None, None, None, [], ["1-2"]),
        "lc_name": MoPropertyMeta("lc_name", "lcName", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
        "lc_ts": MoPropertyMeta("lc_ts", "lcTs", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", [], []),
        "managing_instance": MoPropertyMeta("managing_instance", "managingInstance", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["A", "B", "NONE"], []),
        "mfg_time": MoPropertyMeta("mfg_time", "mfgTime", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""([0-9]){4}-([0-9]){2}-([0-9]){2}T([0-9]){2}:([0-9]){2}:([0-9]){2}((\.([0-9]){3})){0,1}""", ["not-applicable"], []),
        "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "oper_qualifier": MoPropertyMeta("oper_qualifier", "operQualifier", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|not-applicable|thermal|inoperable|voltage|perf|power|removed|fabric-port-problem|post-failure|server-port-problem|fabricpc-link-auto-ack-blocked|backplane-port-problem),){0,12}(defaultValue|not-applicable|thermal|inoperable|voltage|perf|power|removed|fabric-port-problem|post-failure|server-port-problem|fabricpc-link-auto-ack-blocked|backplane-port-problem){0,1}""", [], []),
        "oper_qualifier_reason": MoPropertyMeta("oper_qualifier_reason", "operQualifierReason", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["accessibility-problem", "auto-upgrade", "backplane-port-problem", "bios-post-timeout", "chassis-intrusion", "chassis-limit-exceeded", "config", "decomissioning", "degraded", "disabled", "discovery", "discovery-failed", "equipment-problem", "fabric-conn-problem", "fabric-unsupported-conn", "identify", "identity-unestablishable", "inoperable", "link-activate-blocked", "malformed-fru", "non-optimal", "non-optimal-severe", "not-supported", "operable", "peer-comm-problem", "performance-problem", "post-failure", "power-problem", "powered-off", "removed", "thermal-problem", "unknown", "unsupported-config", "upgrade-problem", "voltage-problem"], []),
        "operability": MoPropertyMeta("operability", "operability", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["accessibility-problem", "auto-upgrade", "backplane-port-problem", "bios-post-timeout", "chassis-intrusion", "chassis-limit-exceeded", "config", "decomissioning", "degraded", "disabled", "discovery", "discovery-failed", "equipment-problem", "fabric-conn-problem", "fabric-unsupported-conn", "identify", "identity-unestablishable", "inoperable", "link-activate-blocked", "malformed-fru", "non-optimal", "non-optimal-severe", "not-supported", "operable", "peer-comm-problem", "performance-problem", "post-failure", "power-problem", "powered-off", "removed", "thermal-problem", "unknown", "unsupported-config", "upgrade-problem", "voltage-problem"], []),
        "part_number": MoPropertyMeta("part_number", "partNumber", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "perf": MoPropertyMeta("perf", "perf", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["lower-critical", "lower-non-critical", "lower-non-recoverable", "not-supported", "ok", "unknown", "upper-critical", "upper-non-critical", "upper-non-recoverable"], []),
        "power": MoPropertyMeta("power", "power", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["degraded", "error", "failed", "not-supported", "off", "offduty", "offline", "ok", "on", "online", "power-save", "test", "unknown"], []),
        "presence": MoPropertyMeta("presence", "presence", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["empty", "equipped", "equipped-deprecated", "equipped-disc-error", "equipped-disc-in-progress", "equipped-disc-not-started", "equipped-disc-unknown", "equipped-identity-unestablishable", "equipped-not-primary", "equipped-slave", "equipped-unsupported", "equipped-with-malformed-fru", "inaccessible", "mismatch", "mismatch-identity-unestablishable", "mismatch-slave", "missing", "missing-slave", "not-supported", "unauthorized", "unknown"], []),
        "re_insert": MoPropertyMeta("re_insert", "reInsert", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["false", "no", "true", "yes"], []),
        "reachability": MoPropertyMeta("reachability", "reachability", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((defaultValue|A|B|unmanaged),){0,3}(defaultValue|A|B|unmanaged){0,1}""", [], []),
        "revision": MoPropertyMeta("revision", "revision", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, 0x40, 0, 256, None, [], []),
        "role": MoPropertyMeta("role", "role", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["active", "standby", "unknown"], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version312b, MoPropertyMeta.READ_WRITE, 0x80, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "thermal": MoPropertyMeta("thermal", "thermal", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["lower-critical", "lower-non-critical", "lower-non-recoverable", "not-supported", "ok", "unknown", "upper-critical", "upper-non-critical", "upper-non-recoverable"], []),
        "usr_lbl": MoPropertyMeta("usr_lbl", "usrLbl", "string", VersionMeta.Version312b, MoPropertyMeta.READ_WRITE, 0x100, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,32}""", [], []),
        "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "vid": MoPropertyMeta("vid", "vid", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "voltage": MoPropertyMeta("voltage", "voltage", "string", VersionMeta.Version312b, MoPropertyMeta.READ_ONLY, None, None, None, None, ["lower-critical", "lower-non-critical", "lower-non-recoverable", "not-supported", "ok", "unknown", "upper-critical", "upper-non-critical", "upper-non-recoverable"], []),
    }

    # Translation table from XML attribute names to Python attribute names.
    prop_map = {
        "adminPowerState": "admin_power_state", 
        "adminState": "admin_state", 
        "assetTag": "asset_tag", 
        "chassisId": "chassis_id", 
        "checkPoint": "check_point", 
        "childAction": "child_action", 
        "configState": "config_state", 
        "connPath": "conn_path", 
        "connStatus": "conn_status", 
        "discovery": "discovery", 
        "dn": "dn", 
        "fltAggr": "flt_aggr", 
        "fsmDescr": "fsm_descr", 
        "fsmPrev": "fsm_prev", 
        "fsmProgr": "fsm_progr", 
        "fsmRmtInvErrCode": "fsm_rmt_inv_err_code", 
        "fsmRmtInvErrDescr": "fsm_rmt_inv_err_descr", 
        "fsmRmtInvRslt": "fsm_rmt_inv_rslt", 
        "fsmStageDescr": "fsm_stage_descr", 
        "fsmStamp": "fsm_stamp", 
        "fsmStatus": "fsm_status", 
        "fsmTry": "fsm_try", 
        "id": "id", 
        "lcName": "lc_name", 
        "lcTs": "lc_ts", 
        "managingInstance": "managing_instance", 
        "mfgTime": "mfg_time", 
        "model": "model", 
        "operQualifier": "oper_qualifier", 
        "operQualifierReason": "oper_qualifier_reason", 
        "operState": "oper_state", 
        "operability": "operability", 
        "partNumber": "part_number", 
        "perf": "perf", 
        "power": "power", 
        "presence": "presence", 
        "reInsert": "re_insert", 
        "reachability": "reachability", 
        "revision": "revision", 
        "rn": "rn", 
        "role": "role", 
        "sacl": "sacl", 
        "serial": "serial", 
        "status": "status", 
        "thermal": "thermal", 
        "usrLbl": "usr_lbl", 
        "vendor": "vendor", 
        "vid": "vid", 
        "voltage": "voltage", 
    }

    def __init__(self, parent_mo_or_dn, id, **kwargs):
        """Create the MO under *parent_mo_or_dn* with naming property *id*.

        Every non-naming property starts out as None; values may be supplied
        through **kwargs and are validated against prop_meta by the base class.
        """
        self._dirty_mask = 0
        self.id = id
        self.admin_power_state = None
        self.admin_state = None
        self.asset_tag = None
        self.chassis_id = None
        self.check_point = None
        self.child_action = None
        self.config_state = None
        self.conn_path = None
        self.conn_status = None
        self.discovery = None
        self.flt_aggr = None
        self.fsm_descr = None
        self.fsm_prev = None
        self.fsm_progr = None
        self.fsm_rmt_inv_err_code = None
        self.fsm_rmt_inv_err_descr = None
        self.fsm_rmt_inv_rslt = None
        self.fsm_stage_descr = None
        self.fsm_stamp = None
        self.fsm_status = None
        self.fsm_try = None
        self.lc_name = None
        self.lc_ts = None
        self.managing_instance = None
        self.mfg_time = None
        self.model = None
        self.oper_qualifier = None
        self.oper_qualifier_reason = None
        self.oper_state = None
        self.operability = None
        self.part_number = None
        self.perf = None
        self.power = None
        self.presence = None
        self.re_insert = None
        self.reachability = None
        self.revision = None
        self.role = None
        self.sacl = None
        self.serial = None
        self.status = None
        self.thermal = None
        self.usr_lbl = None
        self.vendor = None
        self.vid = None
        self.voltage = None

        ManagedObject.__init__(self, "EquipmentSystemIOController", parent_mo_or_dn, **kwargs)
| 80.97561 | 3,747 | 0.738253 |
cc776d06187c1fb044f4d5ba882197cce31ba68c | 260 | py | Python | smallRNA/manager/templatetags/custom_tags.py | ahmetrasit/smallRNA | 9e76065b6fdc834b46537519b5aa0c4a0290cfdc | [
"MIT"
] | 1 | 2018-02-22T07:16:29.000Z | 2018-02-22T07:16:29.000Z | smallRNA/manager/templatetags/custom_tags.py | ahmetrasit/smallRNA | 9e76065b6fdc834b46537519b5aa0c4a0290cfdc | [
"MIT"
] | null | null | null | smallRNA/manager/templatetags/custom_tags.py | ahmetrasit/smallRNA | 9e76065b6fdc834b46537519b5aa0c4a0290cfdc | [
"MIT"
] | null | null | null | from django import template
from django.contrib.auth.models import Group
register = template.Library()
@register.filter(name='has_group')
def has_group(user, group_name):
group = Group.objects.get(name=group_name)
return group in user.groups.all()
| 26 | 47 | 0.761538 |
94c43045eefb770e0aa45cd2aa5c802436fa3973 | 4,373 | py | Python | library/oneview_enclosure_group.py | soodpr/oneview-ansible | f4f07062eda3f6dc17f4c306f376ddebb7313fbb | [
"Apache-2.0"
] | null | null | null | library/oneview_enclosure_group.py | soodpr/oneview-ansible | f4f07062eda3f6dc17f4c306f376ddebb7313fbb | [
"Apache-2.0"
] | null | null | null | library/oneview_enclosure_group.py | soodpr/oneview-ansible | f4f07062eda3f6dc17f4c306f376ddebb7313fbb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2020) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: oneview_enclosure_group
short_description: Manage OneView Enclosure Group resources.
description:
- Provides an interface to manage Enclosure Group resources. Can create, update, or delete.
version_added: "2.3"
requirements:
- "python >= 2.7.9"
- "hpOneView >= 5.0.0"
author: "Gustavo Hennig (@GustavoHennig)"
options:
state:
description:
- Indicates the desired state for the Enclosure Group resource.
C(present) will ensure data properties are compliant with OneView.
C(absent) will remove the resource from OneView, if it exists.
choices: ['present', 'absent']
data:
description:
- List with Enclosure Group properties.
required: true
extends_documentation_fragment:
- oneview
'''
EXAMPLES = '''
- name: Ensure that Enclosure Group is present using the default configuration
oneview_enclosure_group:
hostname: 172.16.101.48
username: administrator
password: my_password
api_version: 1200
state: present
data:
name: "Enclosure Group 1"
stackingMode: "Enclosure" # Not supported in V600
interconnectBayMappings:
- interconnectBay: 1
- interconnectBay: 2
- interconnectBay: 3
- interconnectBay: 4
- interconnectBay: 5
- interconnectBay: 6
- interconnectBay: 7
- interconnectBay: 8
delegate_to: localhost
- name: Update the Enclosure Group changing the name attribute
oneview_enclosure_group:
hostname: 172.16.101.48
username: administrator
password: my_password
api_version: 1200
state: present
data:
name: "Enclosure Group 1"
newName: "Enclosure Group 1 (renamed)"
delegate_to: localhost
- name: Ensure that Enclosure Group is absent
oneview_enclosure_group:
hostname: 172.16.101.48
username: administrator
password: my_password
api_version: 1200
state: absent
data:
name: "Enclosure Group 1 (renamed)"
delegate_to: localhost
'''
RETURN = '''
enclosure_group:
description: Has the facts about the Enclosure Group.
returned: On state 'present'. Can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModule
class EnclosureGroupModule(OneViewModule):
    """Implements the oneview_enclosure_group states on top of OneViewModule."""

    # Result messages surfaced to the user by the base-class helpers.
    MSG_CREATED = 'Enclosure Group created successfully.'
    MSG_UPDATED = 'Enclosure Group updated successfully.'
    MSG_DELETED = 'Enclosure Group deleted successfully.'
    MSG_ALREADY_PRESENT = 'Enclosure Group is already present.'
    MSG_ALREADY_ABSENT = 'Enclosure Group is already absent.'

    # Module-specific arguments; the common OneView connection arguments are
    # merged in by the base class.
    argument_spec = dict(
        state=dict(required=True, choices=['present', 'absent']),
        data=dict(required=True, type='dict'),
    )

    def __init__(self):
        super(EnclosureGroupModule, self).__init__(additional_arg_spec=self.argument_spec)
        # Bind this module to the appliance's enclosure-groups resource client.
        self.set_resource_object(self.oneview_client.enclosure_groups)

    def execute_module(self):
        """Apply the requested state and return the module result dict."""
        if self.state == 'absent':
            return self.resource_absent()

        if self.state == 'present':
            # When the supplied configuration script matches what is already
            # on the appliance, drop it from the payload so the comparison
            # does not report a spurious change.
            if self.current_resource and 'configurationScript' in self.data:
                if self.data['configurationScript'] == self.current_resource.get_script():
                    del self.data['configurationScript']
            return self.resource_present('enclosure_group')
def main():
    """Entry point: build the module object and hand control to its runner."""
    module = EnclosureGroupModule()
    module.run()
# Allow Ansible (or direct execution) to run this file as a script.
if __name__ == '__main__':
    main()
| 30.795775 | 95 | 0.669792 |
c48e7fe17a7b76d12883ea34f8e584c3f61f7c21 | 2,809 | py | Python | sqlalchemy_continuum/__init__.py | labnook/sqlalchemy-continuum | c9273c5f88a05497f01fe857093d07d64685cbc1 | [
"BSD-3-Clause"
] | null | null | null | sqlalchemy_continuum/__init__.py | labnook/sqlalchemy-continuum | c9273c5f88a05497f01fe857093d07d64685cbc1 | [
"BSD-3-Clause"
] | 3 | 2018-09-26T19:58:45.000Z | 2021-07-28T21:26:49.000Z | sqlalchemy_continuum/__init__.py | labnook/sqlalchemy-continuum | c9273c5f88a05497f01fe857093d07d64685cbc1 | [
"BSD-3-Clause"
] | 1 | 2020-11-10T23:02:23.000Z | 2020-11-10T23:02:23.000Z | import sqlalchemy as sa
from .exc import ClassNotVersioned, ImproperlyConfigured
from .manager import VersioningManager
from .operation import Operation
from .transaction import TransactionFactory
from .unit_of_work import UnitOfWork
from .utils import (
changeset,
count_versions,
get_versioning_manager,
is_modified,
is_session_modified,
parent_class,
transaction_class,
tx_column_name,
vacuum,
version_class,
version_table,
apply_table_schema,
)
#: Package version string.
__version__ = '1.4.3'

#: Module-level default manager used by make_versioned()/remove_versioning()
#: when the caller does not pass an explicit manager.
versioning_manager = VersioningManager()
def make_versioned(
    mapper=sa.orm.mapper,
    session=sa.orm.session.Session,
    manager=versioning_manager,
    plugins=None,
    options=None,
    user_cls='User'
):
    """Enable SQLAlchemy-Continuum versioning.

    Public entry point that wires the given versioning ``manager`` into
    SQLAlchemy so that versioned mappers and sessions are tracked. By
    default it applies to all mappers and all sessions.

    :param mapper:
        SQLAlchemy mapper to apply the versioning to.
    :param session:
        SQLAlchemy session class to apply the versioning to; the default
        ``sa.orm.session.Session`` covers every Session subclass.
    :param manager:
        The SQLAlchemy-Continuum versioning manager to hook in.
    :param plugins:
        Optional plugin list to install on the manager.
    :param options:
        Optional dictionary of manager options to merge in.
    :param user_cls:
        User class (or its name as a string, for lazy evaluation) that the
        Transaction class should have a relationship to.
    """
    # Only override manager configuration that was explicitly supplied.
    if plugins is not None:
        manager.plugins = plugins
    if options is not None:
        manager.options.update(options)
    manager.user_cls = user_cls

    # Wire the manager into class configuration, mapper-level operation
    # tracking and session lifecycle events.
    manager.apply_class_configuration_listeners(mapper)
    manager.track_operations(mapper)
    manager.track_session(session)

    # Association tables have no mapped class of their own, so their
    # modifications are picked up at the cursor-execute level.
    sa.event.listen(sa.engine.Engine, 'before_cursor_execute', manager.track_association_operations)
def remove_versioning(
    mapper=sa.orm.mapper,
    session=sa.orm.session.Session,
    manager=versioning_manager
):
    """
    Undo ``make_versioned``: detach all versioning listeners from the given
    mapper / session and reset the manager.

    :param mapper:
        SQLAlchemy mapper to remove the versioning from.
    :param session:
        SQLAlchemy session class to remove the versioning from. Defaults to
        sa.orm.session.Session, i.e. every session.
    :param manager:
        SQLAlchemy-Continuum versioning manager.
    """
    manager.reset()
    # Tear down hooks in the same order they were installed.
    for mapper_teardown in (
        manager.remove_class_configuration_listeners,
        manager.remove_operations_tracking,
    ):
        mapper_teardown(mapper)
    manager.remove_session_tracking(session)
    sa.event.remove(
        sa.engine.Engine,
        'before_cursor_execute',
        manager.track_association_operations,
    )
| 27.811881 | 78 | 0.712709 |
f8fdad3345d8c42392bde9f8a2c764ad0d0a5574 | 1,945 | py | Python | dicom2rawiv/zipextract/gdcm-2.6.3/Examples/Python/ScanDirectory.py | chipbuster/skull-atlas | 7f3ee009e1d5f65f101fe853a2cf6e12662970ee | [
"BSD-3-Clause"
] | 47 | 2020-03-30T14:36:46.000Z | 2022-03-06T07:44:54.000Z | dicom2rawiv/zipextract/gdcm-2.6.3/Examples/Python/ScanDirectory.py | chipbuster/skull-atlas | 7f3ee009e1d5f65f101fe853a2cf6e12662970ee | [
"BSD-3-Clause"
] | null | null | null | dicom2rawiv/zipextract/gdcm-2.6.3/Examples/Python/ScanDirectory.py | chipbuster/skull-atlas | 7f3ee009e1d5f65f101fe853a2cf6e12662970ee | [
"BSD-3-Clause"
] | 8 | 2020-04-01T01:22:45.000Z | 2022-01-02T13:06:09.000Z | ############################################################################
#
# Program: GDCM (Grassroots DICOM). A DICOM library
#
# Copyright (c) 2006-2011 Mathieu Malaterre
# All rights reserved.
# See Copyright.txt or http://gdcm.sourceforge.net/Copyright.html for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the above copyright notice for more information.
#
############################################################################
import gdcm
import sys,os
# NOTE: this file uses Python 2 print statements; it will not run under Python 3.
class ProgressWatcher(gdcm.SimpleSubjectWatcher):
  """Subject watcher that prints scanner progress and completion to stdout."""
  def ShowProgress(self, sender, event):
    # Downcast the generic event to a ProgressEvent to read the fraction.
    pe = gdcm.ProgressEvent.Cast(event)
    print pe.GetProgress()
  def EndFilter(self):
    print "Yay ! I am done"
if __name__ == "__main__":
  # Usage: ScanDirectory.py <directory> -- scans every DICOM file in the
  # directory for two tags and prints the tag/value mapping of one file.
  # (Python 2 syntax throughout.)
  directory = sys.argv[1]

  # Define the set of tags we are interested in
  # (0008,0008) Image Type and (0010,0010) Patient's Name.
  t1 = gdcm.Tag(0x8,0x8);
  t2 = gdcm.Tag(0x10,0x10);

  # Iterate over directory
  d = gdcm.Directory();
  nfiles = d.Load( directory );
  if(nfiles == 0): sys.exit(1);
  # System.Console.WriteLine( "Files:\n" + d.toString() );
  filenames = d.GetFilenames()

  # Get rid of any Warning while parsing the DICOM files
  gdcm.Trace.WarningOff()

  # instanciate Scanner:
  sp = gdcm.Scanner.New();
  s = sp.__ref__()
  # Attach the progress watcher so scan progress is printed.
  w = ProgressWatcher(s, 'Watcher')

  s.AddTag( t1 );
  s.AddTag( t2 );
  b = s.Scan( filenames );
  if(not b): sys.exit(1);
  print "success" ;
  #print s

  # Iterate the tag->value mapping of the second file found.
  # NOTE(review): filenames[1] skips the first file -- presumably intentional
  # for this example, but index 0 would be the first scanned file.
  pttv = gdcm.PythonTagToValue( s.GetMapping( filenames[1] ) )

  pttv.Start()
  # iterate until the end:
  while( not pttv.IsAtEnd() ):
    # get current value for tag and associated value:
    # if tag was not found, then it was simply not added to the internal std::map
    # Warning value can be None
    tag = pttv.GetCurrentTag()
    value = pttv.GetCurrentValue()
    print tag,"->",value
    # increment iterator
    pttv.Next()

  sys.exit(0)
c9120b8f6b477e144300821ea9dd134a068eecbe | 86,166 | py | Python | app/main/routes.py | Evantually/cash-and-associates | ffccaf8d1018b312296adc6d432f802fd825b483 | [
"MIT"
] | null | null | null | app/main/routes.py | Evantually/cash-and-associates | ffccaf8d1018b312296adc6d432f802fd825b483 | [
"MIT"
] | null | null | null | app/main/routes.py | Evantually/cash-and-associates | ffccaf8d1018b312296adc6d432f802fd825b483 | [
"MIT"
] | 1 | 2021-08-21T12:42:57.000Z | 2021-08-21T12:42:57.000Z | from datetime import datetime, timedelta
from flask import render_template, flash, redirect, url_for, request, g, \
jsonify, current_app
from flask_login import current_user, login_required, login_user
from flask_babel import _, get_locale
from langdetect import detect, LangDetectException
from sqlalchemy import func
from sqlalchemy.sql import text
from app import db
from app.main.forms import (EditProfileForm, EmptyForm, AddProductForm,
DeleteForm, AddTransactionForm, AddCategoryForm, AddCompanyForm, AddEmployeeForm,
AddJobForm, ManageSubscriptionForm, ManageUserForm, ManageRacerForm, AddCarForm,
AddOwnedCarForm, AddTrackForm, SetupRaceForm, RaceSignupForm, EditOwnedCarForm,
EditRaceForm, AddCrewForm, AddToRaceForm, RacerSelectForm, RacerManageSelectForm,
EncryptedMessageForm, AddCalendarEventForm)
from app.models import (User, Transaction, Product, Category, Company,
Inventory, Job, HuntingEntry, FishingEntry, PostalEntry,
BlackjackHand, BlackjackEntry, Car, OwnedCar, Track, Race,
RacePerformance, Crew, CrewResults, Notification, Message,
LapTime, Achievement, TrackRating, CalendarEvent, Policy,
PolicyRating)
from app.translate import translate
from app.main import bp
from app.main.utils import (organize_data_by_date, summarize_data, format_currency, setup_company,
summarize_job, moving_average, clear_temps, blackjack_cards,
get_available_classes, determine_crew_points, get_timezones,
post_to_discord, calculate_crew_points, async_check_achievements,
calculate_payouts, convert_from_milliseconds, post_encrypted_message,
post_cancel_to_discord, post_calendar_event_to_discord,
parse_urls, check_if_image)
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
@bp.before_app_request
def before_request():
    """Stamp the authenticated user's last-seen time and expose the locale."""
    g.locale = str(get_locale())
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
@bp.route('/', methods=['GET', 'POST'])
def landing_page():
    """Public landing page (no login required)."""
    return render_template('landing_page.html')
@bp.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Dashboard: revenue/expense history and running balance.

    Managers see every transaction of their company; everyone else sees
    only their own transactions.
    """
    # BUG FIX: the original condition was `access_level in ('manager')`.
    # ('manager') is just the string 'manager', so that was a substring test
    # (e.g. 'man' matched); use equality for an exact role check.
    if current_user.access_level == 'manager':
        subquery = [u.id for u in User.query.filter(User.company == current_user.company).all()]
        transactions = Transaction.query.filter(Transaction.user_id.in_(subquery)).order_by(Transaction.timestamp.desc()).all()
        revenue = Transaction.query.filter(Transaction.user_id.in_(subquery)).filter_by(transaction_type='Revenue').order_by(Transaction.timestamp.desc()).all()
        expenses = Transaction.query.filter(Transaction.user_id.in_(subquery)).filter_by(transaction_type='Expense').order_by(Transaction.timestamp.desc()).all()
    else:
        transactions = Transaction.query.filter_by(user_id=current_user.id).order_by(Transaction.timestamp.desc()).all()
        revenue = Transaction.query.filter_by(user_id=current_user.id).filter_by(transaction_type='Revenue').order_by(Transaction.timestamp.desc()).all()
        expenses = Transaction.query.filter_by(user_id=current_user.id).filter_by(transaction_type='Expense').order_by(Transaction.timestamp.desc()).all()
    revenue_info = organize_data_by_date(revenue)
    expense_info = organize_data_by_date(expenses)
    transaction_info, transactions = summarize_data(transactions)
    # Balance is a (raw_number, display_string) pair; the raw sums are
    # replaced by display strings only after the balance is computed.
    balance = (revenue_info['sum'] - expense_info['sum'],
               format_currency(revenue_info['sum'] - expense_info['sum']))
    revenue_info['sum'] = format_currency(revenue_info['sum'])
    expense_info['sum'] = format_currency(expense_info['sum'])
    return render_template('index.html', title=_('Home'), revenue=revenue, transactions=transactions,
                           revenue_info=revenue_info, expenses=expenses,
                           expense_info=expense_info, balance=balance)
# CALENDAR START
@bp.route('/calendar', methods=['GET'])
@csrf.exempt
def calendar():
    """Render the public calendar page; events are fetched via /calendar_events."""
    return render_template('calendar.html')
@bp.route('/calendar_events', methods=['GET'])
def calendar_events():
    """Return every calendar event as JSON.

    Image URLs found in an event description are stripped out of the
    description text and returned separately under ``images``.
    """
    entries = []
    for ev in CalendarEvent.query.all():
        found_urls = parse_urls(ev.description)
        image_urls = check_if_image(found_urls)
        text = ev.description
        for url in image_urls:
            text = text.replace(url, '')
        entries.append({
            "id": ev.id,
            "google_id": ev.google_id,
            "user_id": ev.user_id,
            "author": ev.author_name,
            "start": ev.start,
            "end": ev.end,
            "title": ev.title,
            "description": text,
            "company": ev.company,
            "image": ev.image,
            "location": ev.location,
            "cost": ev.cost,
            "category": ev.category,
            "images": image_urls,
        })
    return jsonify({"entries": entries})
@bp.route('/add_calendar_event', methods=['GET','POST'])
@csrf.exempt
@login_required
def add_calendar_event():
    """Create a calendar event from the form and announce it on Discord.

    The form submits UTC timestamps shaped 'YYYY-MM-DDTHH:MM:SSZ'; both are
    normalized to 'YYYY-MM-DD HH:MM:SS' before storage.
    """
    form = AddCalendarEventForm()
    if form.validate_on_submit():
        # BUG FIX: the end timestamp previously replaced 'T' twice and never
        # stripped the trailing 'Z'; both timestamps now normalize the same
        # way. (Also removed an unused `starttime` parse of the start value.)
        start_utc_time = form.start_utc.data.replace('T', ' ').replace('Z', '')
        end_utc_time = form.end_utc.data.replace('T', ' ').replace('Z', '')
        event = CalendarEvent(start=start_utc_time, end=end_utc_time,
                              title=form.title.data, description=form.description.data,
                              company=form.company.data, image=form.image.data,
                              user_id=current_user.id, location=form.location.data,
                              cost=form.cost.data, author_name=form.author.data)
        db.session.add(event)
        db.session.commit()
        post_calendar_event_to_discord(event)
        flash('The event has been added to the calendar.')
        return redirect(url_for('main.calendar'))
    return render_template('add_product.html', title="Add Calendar Event", form=form)
@bp.route('/edit_calendar_event/<event_id>', methods=['GET','POST'])
@login_required
def edit_calendar_event(event_id):
    """Edit or delete an existing calendar event, announcing on Discord."""
    event = CalendarEvent.query.filter_by(id=event_id).first()
    # Pre-fill the form with the stored datetimes rendered back into the
    # 'YYYY-MM-DDTHH:MM:SSZ' wire format the form expects.
    start_utc = datetime.strftime(event.start, '%Y-%m-%dT%H:%M:%SZ')
    end_utc = datetime.strftime(event.end, '%Y-%m-%dT%H:%M:%SZ')
    form = AddCalendarEventForm(start_utc=start_utc, end_utc=end_utc,
                                title=event.title, description=event.description,
                                company=event.company, image=event.image,
                                location=event.location, cost=event.cost,
                                author=event.author_name)
    if form.validate_on_submit():
        if form.delete_event.data:
            # Store the deletion reason in the description so the Discord
            # cancellation post can include it, then remove the event.
            event.description = form.deletion_reason.data
            db.session.commit()
            post_calendar_event_to_discord(event, deleted=True)
            db.session.delete(event)
            db.session.commit()
            flash('The event has been deleted.')
            return redirect(url_for('main.calendar'))
        # BUG FIX: the end timestamp previously replaced 'T' twice and kept
        # the trailing 'Z'; both timestamps now normalize identically.
        event.start = form.start_utc.data.replace('T', ' ').replace('Z', '')
        event.end = form.end_utc.data.replace('T', ' ').replace('Z', '')
        event.title = form.title.data
        event.description = form.description.data
        event.company = form.company.data
        event.image = form.image.data
        event.user_id = current_user.id
        event.location = form.location.data
        event.cost = form.cost.data
        event.author_name = form.author.data
        db.session.commit()
        flash('The event has been updated.')
        post_calendar_event_to_discord(event, update=True)
        return redirect(url_for('main.calendar'))
    return render_template('add_product.html', title="Edit Calendar Event", form=form)
@bp.route('/add_calendar_event/<start>/<end>/<start_utc>/<end_utc>', methods=['GET','POST'])
@login_required
def add_calendar_event_ui(start, end, start_utc, end_utc):
    """Create a calendar event pre-filled from a time range selected in the UI.

    ``start``/``end`` arrive as local 'YYYY-MM-DDTHH:MMam/pm' strings;
    ``start_utc``/``end_utc`` arrive ISO-formatted with a '+00:00' offset
    that is normalized to a trailing 'Z' for the form.
    """
    st = datetime.strptime(start, '%Y-%m-%dT%I:%M%p')
    en = datetime.strptime(end, '%Y-%m-%dT%I:%M%p')
    start_utc = start_utc.replace('+00:00', 'Z')
    end_utc = end_utc.replace('+00:00', 'Z')
    form = AddCalendarEventForm(start=st, end=en, start_utc=start_utc, end_utc=end_utc)
    if form.validate_on_submit():
        # BUG FIX: the end timestamp previously replaced 'T' twice and never
        # stripped the trailing 'Z'; both timestamps now normalize the same
        # way. (Also removed an unused `starttime` parse of the start value.)
        start_utc_time = form.start_utc.data.replace('T', ' ').replace('Z', '')
        end_utc_time = form.end_utc.data.replace('T', ' ').replace('Z', '')
        event = CalendarEvent(start=start_utc_time, end=end_utc_time,
                              title=form.title.data, description=form.description.data,
                              company=form.company.data, image=form.image.data,
                              user_id=current_user.id, location=form.location.data,
                              cost=form.cost.data, author_name=form.author.data)
        db.session.add(event)
        db.session.commit()
        post_calendar_event_to_discord(event)
        flash('The event has been added to the calendar.')
        return redirect(url_for('main.calendar'))
    return render_template('add_product.html', title="Add Calendar Event", form=form)
# CALENDAR END
# POLICY START
@bp.route('/enacted_policy', methods=['GET'])
def enacted_policy():
    """Placeholder: renders the dashboard template until the policy page exists."""
    return render_template('index.html')
@bp.route('/suggest_policy', methods=['GET', 'POST'])
@login_required
def suggest_policy():
    """Placeholder: renders the dashboard template until the policy page exists."""
    return render_template('index.html')
# POLICY END
@bp.route('/user/<username>')
@login_required
def user(username):
    """Public profile page: a user's sale products and their transactions."""
    user = User.query.filter_by(username=username).first_or_404()
    products = Product.query.filter_by(user_id=user.id).filter_by(sales_item=True).order_by(Product.name).all()
    transactions = Transaction.query.filter_by(user_id=user.id).order_by(Transaction.timestamp.desc()).all()
    # (Removed an unused `EmptyForm()` local that was never passed to the template.)
    return render_template('user.html', user=user,
                           products=products, transactions=transactions)
@bp.route('/add_product', methods=['GET', 'POST'])
@login_required
def add_product():
    """Create a product owned by the current user, optionally company-wide."""
    form = AddProductForm()
    if not form.validate_on_submit():
        return render_template('add_product.html', title=_('Add Product'),
                               form=form)
    product = Product(
        name=form.product.data,
        price=form.price.data,
        user_id=current_user.id,
        img_url=form.img_url.data,
        sales_item=form.sales_item.data,
    )
    # Marking it as a company item makes it visible to the whole company.
    if form.company_item.data:
        product.company_id = current_user.company
    db.session.add(product)
    db.session.commit()
    flash(f'{product.name} has been successfully added.')
    return redirect(url_for('main.add_product'))
@bp.route('/add_transaction', methods=['GET', 'POST'])
@login_required
def add_transaction():
    """Record a revenue/expense transaction, optionally restocking inventory."""
    form = AddTransactionForm()
    # Product choices are limited to the user's company; "" / "---" marks a
    # transaction that is not tied to any product.
    form.product.choices = [("", "---")]+[(s.id, s.name) for s in Product.query.filter_by(company_id=current_user.company).all()]
    if form.validate_on_submit():
        if form.product.data == "":
            product_id = None
        else:
            product_id = form.product.data
        try:
            product_name = Product.query.filter_by(id=product_id).first().name
        except AttributeError:
            # .first() returned None (no matching product row).
            product_name=None
        transaction = Transaction(name=form.name.data, transaction_type=str(form.transaction_type.data), product=product_id,
                                product_name=product_name,
                                user_id=current_user.id, price=int(form.price.data), quantity=int(form.quantity.data),
                                total=int(form.price.data)*int(form.quantity.data), category=str(form.category.data),
                                details=form.description.data)
        if form.inventory.data:
            # Restock: create the inventory row on first purchase, otherwise
            # bump the on-hand quantity.
            inv = Inventory.query.filter_by(product_id=product_id).first()
            if inv is None:
                inv = Inventory(quantity=form.quantity.data, product_id=product_id, company_id=current_user.company)
                db.session.add(inv)
            else:
                inv.quantity += form.quantity.data
        db.session.add(transaction)
        db.session.commit()
        flash(f'Your transaction has been successfully added.')
        return redirect(url_for('main.add_transaction'))
    return render_template('add_product.html', title=_('Add Transaction'),
                            form=form)
# BEGIN ADMIN AREA
@bp.route('/add_category', methods=['GET', 'POST'])
@login_required
def add_category():
    """Admins (or company-1 members) define new transaction categories."""
    # Guard clause: everyone else is bounced back to the dashboard.
    if not (current_user.access_level == 'admin' or current_user.company == 1):
        flash('You do not have access to add a category.')
        return redirect(url_for('main.index'))
    form = AddCategoryForm()
    if form.validate_on_submit():
        new_cat = Category(name=form.category.data)
        db.session.add(new_cat)
        db.session.commit()
        flash(f'{new_cat.name} has been added as a category.')
        return redirect(url_for('main.add_category'))
    return render_template('add_product.html', title=_('Add Category'),
                           form=form)
@bp.route('/add_company', methods=['GET', 'POST'])
@login_required
def add_company():
    """Admin-only: create a company and promote the chosen user to its manager."""
    if current_user.access_level != 'admin':
        flash('You do not have access to add a company.')
        return redirect(url_for('main.index'))
    form = AddCompanyForm()
    if not form.validate_on_submit():
        return render_template('add_product.html', title=_('Add Company'),
                               form=form)
    # Persist the company first so it gets an id, then re-read it by name.
    db.session.add(Company(name=form.name.data))
    db.session.commit()
    company = Company.query.filter_by(name=form.name.data).first()
    manager = form.manager.data
    manager.company = company.id
    db.session.merge(manager)
    manager.access_level = 'manager'
    db.session.merge(manager)
    db.session.commit()
    flash(f'{company.name} has been added as a company.')
    return redirect(url_for('main.add_company'))
# Subscription Management
@bp.route('/manage_subscriptions', methods=['GET','POST'])
@login_required
def manage_user():
    """Pick the user whose subscription should be managed (admin/company 1)."""
    if not (current_user.access_level == 'admin' or current_user.company == 1):
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
    form = ManageUserForm()
    if form.validate_on_submit():
        return redirect(url_for('main.manage_subscriptions',
                                user_id=form.user.data.id))
    return render_template('add_product.html', title='Manage Subscriptions', form=form)
@bp.route('/manage_subscriptions/<user_id>', methods=['GET','POST'])
@login_required
def manage_subscriptions(user_id):
    """Admin/company-1 page to toggle a user's feature flags and extend expiry."""
    if current_user.access_level == 'admin' or current_user.company == 1:
        user = User.query.filter_by(id=user_id).first_or_404()
        # Pre-populate the form with the user's current subscription flags.
        form = ManageSubscriptionForm(hunter=user.hunter, fisher=user.fisher, postal=user.postal,
                                    blackjack=user.blackjack, personal=user.personal, business=user.business,
                                    jrp=user.jrp, nd=user.nd, auto_renew=user.auto_renew)
        if form.validate_on_submit():
            user.hunter = form.hunter.data
            user.fisher = form.fisher.data
            user.postal = form.postal.data
            user.personal = form.personal.data
            user.business = form.business.data
            user.blackjack = form.blackjack.data
            user.jrp = form.jrp.data
            user.nd = form.nd.data
            user.auto_renew = form.auto_renew.data
            if form.extend.data:
                # Active subscriptions are extended from their current expiry;
                # lapsed ones restart from now.
                if user.sub_expiration > datetime.utcnow():
                    user.sub_expiration = user.sub_expiration + timedelta(days=form.sub_length.data)
                else:
                    user.sub_expiration = datetime.utcnow() + timedelta(days=form.sub_length.data)
            db.session.commit()
            flash(f'Subscription info updated for {user.username}')
            return redirect(url_for('main.active_subscriptions'))
        return render_template('add_product.html', title=f'Manage Subscriptions - {user.username}', form=form)
    else:
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
@bp.route('/active_subscriptions', methods=['GET'])
@login_required
def active_subscriptions():
    """List users split into expired vs. active subscriptions (admin/company 1)."""
    if not (current_user.access_level == 'admin' or current_user.company == 1):
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
    now = datetime.utcnow()
    expired = (User.query.filter(User.sub_expiration < now)
               .order_by(User.sub_expiration).all())
    active = (User.query.filter(User.sub_expiration >= now)
              .order_by(User.sub_expiration).all())
    return render_template('active_subscriptions.html', active_subs=active,
                           expired_subs=expired)
@bp.route('/clear_temps', methods=['GET'])
@login_required
def clear_temps():
    """Admin/company-1 action: purge temporary demo accounts.

    BUG FIX: this view function shadows the ``clear_temps`` helper imported
    at the top of the module, so the original ``clear_temps()`` call recursed
    into the view itself instead of running the cleanup. Import the helper
    under a local alias to call the real implementation.
    """
    if current_user.access_level == 'admin' or current_user.company == 1:
        from app.main.utils import clear_temps as _clear_temps
        _clear_temps()
        flash('Temporary accounts have been successfully cleared.')
        return redirect(url_for('main.index'))
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
# END ADMIN AREA
# BEGIN BUSINESS MANAGER AREA
@bp.route('/set_employees', methods=['GET', 'POST'])
@login_required
def set_employees():
    """Managers/admins attach an existing user to their company as an employee."""
    if current_user.access_level not in ('admin', 'manager'):
        flash('You do not have access to add employees. If you are a manager, talk to Luca or Naomi')
        return redirect(url_for('main.index'))
    form = AddEmployeeForm()
    if not form.validate_on_submit():
        return render_template('add_product.html', title=_('Add Employees'),
                               form=form)
    employee = User.query.filter_by(username=form.employee.data.username).first()
    employee.company = current_user.company
    employee.access_level = 'employee'
    db.session.merge(employee)
    db.session.commit()
    company = Company.query.filter_by(id=current_user.company).first()
    flash(f'{employee.username} has been added as an employee of {company.name}.')
    return redirect(url_for('main.set_employees'))
@bp.route('/transaction_history', methods=['GET', 'POST'])
@login_required
def transaction_history():
    """Full transaction history: company-wide for privileged roles, personal
    for users without a company, denied otherwise."""
    if current_user.access_level in ('admin', 'manager', 'temp'):
        # Company-wide view: all transactions of every user in the company.
        subquery = [u.id for u in User.query.filter(User.company == current_user.company).all()]
        transactions = Transaction.query.filter(Transaction.user_id.in_(subquery)).order_by(Transaction.timestamp.desc()).all()
        transaction_info, transactions = summarize_data(transactions)
        return render_template('transaction_history.html', transactions=transactions, tr_info=transaction_info)
    if current_user.company is None:
        # Solo user: only their own transactions.
        transactions = Transaction.query.filter_by(user_id=current_user.id).all()
        transaction_info, transactions = summarize_data(transactions)
        return render_template('transaction_history.html', transactions=transactions, tr_info=transaction_info)
    else:
        flash('You do not have access to the full transaction history. If you are a manager, talk to Luca or Naomi.')
        return redirect(url_for('main.index'))
#END BUSINESS MANAGER AREA
@bp.route('/delete_product/<product_id>', methods=['GET', 'POST'])
@login_required
def delete_product(product_id):
    """Delete a product if the current user owns it or manages its company.

    Refactor: the owner and manager branches were identical and each
    re-queried the product; they are merged and the already-fetched row is
    reused.
    """
    form = DeleteForm()
    if form.validate_on_submit():
        product = Product.query.filter_by(id=product_id).first_or_404()
        owns = current_user.id == product.user_id
        manages = (current_user.company == product.company_id
                   and current_user.access_level == 'manager')
        if owns or manages:
            db.session.delete(product)
            db.session.commit()
            flash(f'{product.name} has been deleted successfully.')
        else:
            flash('You do not have authority to delete this product.')
        return redirect(url_for('main.add_product'))
    return render_template('add_product.html', title=_('Delete Product'),
                           form=form)
@bp.route('/delete_transaction/<transaction_id>', methods=['GET', 'POST'])
@login_required
def delete_transaction(transaction_id):
    """Confirm and delete a transaction owned by the current user."""
    form = DeleteForm()
    tr = Transaction.query.filter_by(id=transaction_id).first_or_404()
    if form.validate_on_submit():
        if tr.user_id == current_user.id:
            db.session.delete(tr)
            db.session.commit()
            flash(f'{tr.product_name} has been deleted successfully.')
            return redirect(url_for('main.add_product'))
        flash('You do not have authority to delete this transaction.')
        return redirect(url_for('main.index'))
    return render_template('delete_transaction.html', title=_('Delete Transaction'),
                           form=form, transaction=tr)
@bp.route('/point_of_sale')
@login_required
def point_of_sale():
    """POS screen: the sellable products scoped to the user or their company."""
    query = Product.query.filter_by(sales_item=True)
    if current_user.company is None:
        query = query.filter_by(user_id=current_user.id)
    else:
        query = query.filter_by(company_id=current_user.company)
    products = query.order_by(Product.name).all()
    inventory = Inventory.query.filter_by(company_id=current_user.company).all()
    return render_template('point_of_sale.html', products=products,
                           inventory=inventory, user=current_user)
@bp.route('/purchase_inventory', methods=['GET', 'POST'])
@login_required
def purchase_inventory():
    """Record an inventory purchase for the current user.

    NOTE(review): ``PurchaseInventoryForm`` is not among this module's
    imports, so hitting this route raises NameError -- confirm where the
    form class lives and import it.
    """
    form = PurchaseInventoryForm()
    if form.validate_on_submit():
        inventory = Inventory(user_id=current_user.id, product_id=form.product.data.id,
                            quantity=form.quantity.data, price_paid=form.price_paid.data)
        db.session.add(inventory)
        db.session.commit()
        flash(f'{form.quantity.data} {form.product.data} have been added to the inventory of {current_user.username}')
        return redirect(url_for('main.purchase_inventory'))
    return render_template('add_product.html', title='Purchase Inventory', form=form)
@bp.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Let the current user update their username and bio."""
    form = EditProfileForm(current_user.username)
    if form.validate_on_submit():
        current_user.username = form.username.data
        current_user.about_me = form.about_me.data
        db.session.commit()
        flash(_('Your changes have been saved.'))
        return redirect(url_for('main.edit_profile'))
    # On a plain GET, pre-fill the form with the stored profile values.
    if request.method == 'GET':
        form.username.data = current_user.username
        form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', title=_('Edit Profile'),
                           form=form)
@bp.route('/post_sale', methods=['POST'])
@login_required
def post_sale():
    """AJAX endpoint for the POS screen: record a sale and return a summary.

    Falls back to the product's list price and a quantity of 1 when the
    submitted values are not valid integers.
    """
    product = Product.query.filter_by(id=request.form['product_id']).first_or_404()
    try:
        price = int(request.form['cost'])
    except ValueError:
        price = product.price
    try:
        quantity = int(request.form['quantity'])
    except ValueError:
        quantity = 1
    # Decrement on-hand stock if an inventory row exists; the response
    # reports 0 remaining when the product is untracked.
    # NOTE(review): nothing stops the quantity from going negative here.
    inv = Inventory.query.filter_by(product_id=product.id).first()
    if inv is not None:
        inv.quantity -= quantity
        amount = inv.quantity
    else:
        amount = 0
    transaction = Transaction(transaction_type='Revenue', name=f'{product.name} sale', product=product.id,
                            product_name=product.name, user_id=current_user.id,
                            price=price, quantity=quantity, total=price*quantity, category='Sales',
                            details=request.form['description'])
    db.session.add(transaction)
    db.session.commit()
    return jsonify({'text': f'The sale of {quantity} {product.name}{"" if quantity == 1 else "s"} for ${price} each has been recorded (${price*quantity} total).',
                    'quantity': amount})
@bp.route('/tutorials')
def tutorials():
    """Tutorial index page."""
    return render_template('tutorial.html')
@bp.route('/add_product_tutorial')
def add_product_tutorial():
    """Static tutorial: adding a product."""
    return render_template('add_product_tutorial.html')
@bp.route('/add_transaction_tutorial')
def add_transaction_tutorial():
    """Static tutorial: adding a transaction."""
    return render_template('add_transaction_tutorial.html')
@bp.route('/recording_sales_tutorial')
def recording_sales_tutorial():
    """Static tutorial: recording sales."""
    return render_template('recording_sales_tutorial.html')
@bp.route('/changelogs')
def changelogs():
    """Static changelog page."""
    return render_template('changelogs.html')
@bp.route('/roadmap')
def roadmap():
    """Static roadmap page."""
    return render_template('roadmap.html')
@bp.route('/test/<business>', methods=['GET', 'POST'])
def test(business):
    """Demo entry point: provision a demo company user and log the visitor in.

    NOTE(review): this logs the visitor in with no authentication at all --
    confirm it is intended to be publicly reachable outside of demos.
    """
    user = setup_company(business)
    login_user(user)
    return redirect(url_for('main.point_of_sale'))
@bp.route('/fetch_info/<company_id>/<access_token>')
def fetch_info(company_id, access_token):
    """Token-guarded JSON export for a company's data.

    Currently a stub: a valid token yields an empty jsonify() payload; the
    transactional export marked by the inline comment is still unwritten.
    """
    company = Company.query.filter_by(id=company_id).first_or_404()
    if access_token == company.access_token:
        # Add transactional info here to be spit out into json
        return jsonify()
    return 'Incorrect access token. Please check with your manager or C&A staff.'
# START JOB SECTION
@bp.route('/jobs', methods=['GET','POST'])
@login_required
def jobs():
    """Start a tracked job and redirect to the matching tracker page.

    Requires an unexpired subscription with at least one tracker enabled.
    """
    # Legacy accounts may have sub_expiration unset (None); comparing None
    # with a datetime raises TypeError, so seed an already-expired value.
    # BUG FIX: was a bare `except:` which also swallowed unrelated errors.
    try:
        current_user.sub_expiration > datetime.utcnow()
    except TypeError:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and (current_user.fisher or current_user.hunter or current_user.postal):
        form = AddJobForm()
        if form.validate_on_submit():
            job = Job(name=form.name.data, job_type=form.trip_type.data, user_id=current_user.id)
            db.session.add(job)
            db.session.commit()
            flash(f'{job.name} has been added.')
            # Route to the tracker that matches the chosen job type.
            if job.job_type == 'Hunting':
                return redirect(url_for('main.hunting_tracker', job_id=job.id))
            elif job.job_type == 'Fishing':
                return redirect(url_for('main.fishing_tracker', job_id=job.id))
            elif job.job_type == 'Postal':
                return redirect(url_for('main.postal_tracker', job_id=job.id))
            elif job.job_type == 'Blackjack':
                return redirect(url_for('main.blackjack_tracker', job_id=job.id))
        return render_template('add_product.html', title='Start Job', form=form)
    else:
        flash('Please renew your subscription to keep using this service!')
        return redirect(url_for('main.index'))
# HUNTING
@bp.route('/jobs/hunting/tracker/<job_id>')
@login_required
def hunting_tracker(job_id):
    """Live hunting-tracker page for a job (requires hunter subscription)."""
    # Seed a missing (None) expiry with an already-expired value; comparing
    # None with a datetime raises TypeError. BUG FIX: was a bare `except:`.
    try:
        current_user.sub_expiration > datetime.utcnow()
    except TypeError:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and current_user.hunter:
        job = Job.query.filter_by(id=job_id).first()
        return render_template('hunting_tracker.html', job=job)
    else:
        flash('Please renew your subscription to keep using this service!')
        return redirect(url_for('main.index'))
@bp.route('/jobs/hunting/view')
@login_required
def hunting_jobs():
    """Overview of the user's hunting jobs with a daily-earnings chart."""
    jobs = (Job.query.filter_by(user_id=current_user.id)
            .filter_by(job_type='Hunting')
            .order_by(Job.timestamp.desc()).all())
    entries = HuntingEntry.query.filter_by(user_id=current_user.id).all()
    # 1440-minute (daily) windows over the user's whole hunting history.
    ma_data, time_data, yield_data = moving_average(entries, 1440, 0, HuntingEntry)
    return render_template('jobs_overview.html', jobs=jobs, values=ma_data,
                           labels=time_data, yield_data=yield_data,
                           label='Daily Earnings ($)', label2='% Kills Yielding',
                           job_type='Hunting')
@bp.route('/jobs/hunting/view/<job_id>')
@login_required
def hunting_view(job_id):
    """Detail view of one hunting job: entries, 5-minute chart, and totals."""
    job = Job.query.filter_by(id=job_id).first_or_404()
    entries = HuntingEntry.query.filter_by(job=job_id).order_by(HuntingEntry.timestamp).all()
    ma_data, time_data, yield_data = moving_average(entries, 2, 30, HuntingEntry)
    output = summarize_job(entries, 'Hunting')
    # Cache the computed totals back onto the job row.
    job.total_earnings = output['total']
    job.hourly_earnings = output['total_hour']
    db.session.commit()
    return render_template('job_view.html', output=output, entries=entries,
                        values=ma_data, labels=time_data, yield_data=yield_data, label=f'5 Minute Earnings ($)',
                        label2='% Kills Yielding', job_type='Hunting')
@bp.route('/jobs/hunting/tracker/add_entry', methods=['POST'])
@login_required
def add_hunting_entry():
    """AJAX endpoint: record one hunting kill for a job.

    Sell value: meat $65, small pelt $100, medium pelt $110, large pelt $170.
    """
    job = Job.query.filter_by(id=request.form['job_id']).first()
    # BUG FIX: request.form values are strings, so the original comparison
    # `request.form['coll'] == 0` was always False and collateral was always
    # recorded as True. Compare against the string form instead (the sibling
    # postal endpoint already does string comparison).
    coll = request.form['coll'] != '0'
    sell_value = (int(request.form['meat']) * 65) + (int(request.form['smpelt']) * 100) + (int(request.form['medpelt']) * 110) + (int(request.form['lgpelt']) * 170)
    entry = HuntingEntry(job=job.id, user_id=current_user.id, collateral=coll,
                         meat=request.form['meat'], small_pelt=request.form['smpelt'],
                         med_pelt=request.form['medpelt'], large_pelt=request.form['lgpelt'],
                         sell_value=sell_value)
    db.session.add(entry)
    db.session.commit()
    return jsonify({'text': f'This entry has been recorded at {entry.timestamp}.'})
# END HUNTING
# FISHING
@bp.route('/jobs/fishing/view')
@login_required
def fishing_jobs():
    """Overview of the user's fishing jobs with a daily-earnings chart."""
    jobs = (Job.query.filter_by(user_id=current_user.id)
            .filter_by(job_type='Fishing')
            .order_by(Job.timestamp.desc()).all())
    entries = FishingEntry.query.filter_by(user_id=current_user.id).all()
    # 1440-minute (daily) windows over the user's whole fishing history.
    ma_data, time_data, yield_data = moving_average(entries, 1440, 0, FishingEntry)
    return render_template('jobs_overview.html', jobs=jobs, values=ma_data,
                           labels=time_data, yield_data=yield_data)
@bp.route('/jobs/fishing/tracker/<job_id>')
@login_required
def fishing_tracker(job_id):
    """Live fishing-tracker page for a job (requires fisher subscription)."""
    # Seed a missing (None) expiry with an already-expired value; comparing
    # None with a datetime raises TypeError. BUG FIX: was a bare `except:`.
    try:
        current_user.sub_expiration > datetime.utcnow()
    except TypeError:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and current_user.fisher:
        job = Job.query.filter_by(id=job_id).first_or_404()
        return render_template('fishing_tracker.html', job=job)
    else:
        flash('Please renew your subscription to keep using this service!')
        return redirect(url_for('main.index'))
@bp.route('/jobs/fishing/view/<job_id>')
@login_required
def fishing_view(job_id):
    """Detail view of one fishing job: entries, 5-minute chart, and totals."""
    job = Job.query.filter_by(id=job_id).first_or_404()
    entries = FishingEntry.query.filter_by(job=job_id).order_by(FishingEntry.timestamp).all()
    ma_data, time_data, yield_data = moving_average(entries, 2, 30, FishingEntry)
    output = summarize_job(entries, 'Fishing')
    # Cache the computed totals back onto the job row.
    job.total_earnings = output['total']
    job.hourly_earnings = output['total_hour']
    db.session.commit()
    return render_template('job_view.html', output=output, entries=entries,
                        values=ma_data, labels=time_data, yield_data=yield_data, label=f'5 Minute Earnings ($)', label2='% Caught Fish',
                        job_type='Fishing')
@bp.route('/jobs/fishing/tracker/add_entry', methods=['POST'])
@login_required
def add_fishing_entry():
    """AJAX endpoint: record one fishing entry; each fish sells for $115."""
    parent_job = Job.query.filter_by(id=request.form['job_id']).first()
    fish_count = int(request.form['fish'])
    entry = FishingEntry(job=parent_job.id, user_id=current_user.id,
                         fish=request.form['fish'], misc=request.form['misc'],
                         sell_value=fish_count * 115)
    db.session.add(entry)
    db.session.commit()
    return jsonify({'text': f'This entry has been recorded at {entry.timestamp}.'})
# END FISHING
# START GOPOSTAL
@bp.route('/jobs/postal/view')
@login_required
def postal_jobs():
    """Overview of the user's postal jobs with an hourly-earnings chart."""
    jobs = (Job.query.filter_by(user_id=current_user.id)
            .filter_by(job_type='Postal')
            .order_by(Job.timestamp.desc()).all())
    entries = PostalEntry.query.filter_by(user_id=current_user.id).all()
    # 60-minute windows over the user's whole postal history.
    ma_data, time_data, yield_data = moving_average(entries, 60, 0, PostalEntry)
    return render_template('jobs_overview.html', jobs=jobs, values=ma_data,
                           labels=time_data, yield_data=yield_data)
@bp.route('/jobs/postal/tracker/<job_id>')
@login_required
def postal_tracker(job_id):
    """Render the live GoPostal tracker page for one job.

    Requires an unexpired subscription and the ``postal`` flag; otherwise
    the user is flashed a renewal notice and sent back to the index.
    """
    # Explicit None check replaces the old bare ``except`` around a
    # comparison-as-statement (which only existed to catch the TypeError
    # raised by ``None > datetime``).
    if current_user.sub_expiration is None:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and current_user.postal:
        job = Job.query.filter_by(id=job_id).first_or_404()
        return render_template('postal_tracker.html', job=job)
    flash('Please renew your subscription to keep using this service!')
    return redirect(url_for('main.index'))
@bp.route('/jobs/postal/view/<job_id>')
@login_required
def postal_view(job_id):
    """Detail view for one postal job: summary totals plus chart data."""
    job = Job.query.filter_by(id=job_id).first_or_404()
    entries = PostalEntry.query.filter_by(job=job_id).order_by(PostalEntry.timestamp).all()
    ma_data, time_data, yield_data = moving_average(entries, 2, 30, PostalEntry)
    output = summarize_job(entries, 'Postal')
    # Persist the freshly computed totals back onto the job row.
    job.total_earnings = output['total']
    job.hourly_earnings = output['total_hour']
    db.session.commit()
    return render_template('job_view.html', output=output, entries=entries,
        values=ma_data, labels=time_data, yield_data=yield_data, label=f'5 Minute Earnings ($)', label2='% Packages Accepted')
@bp.route('/jobs/postal/tracker/add_entry', methods=['POST'])
@login_required
def add_postal_entry():
    """Persist one GoPostal tracker entry posted from the tracker page."""
    form = request.form
    job = Job.query.filter_by(id=int(form['job_id'])).first()
    entry = PostalEntry(
        job=job.id,
        user_id=current_user.id,
        no_pay=form['no_pay'] == 'true',
        sell_value=int(form['pay']),
    )
    db.session.add(entry)
    db.session.commit()
    return jsonify({'text': f'This entry has been recorded at {entry.timestamp}.'})
# END GOPOSTAL
# START MINING
@bp.route('/jobs/mining/view')
@login_required
def mining_jobs():
    """Overview of the current user's mining jobs with earnings chart data."""
    uid = current_user.id
    jobs = (Job.query
            .filter_by(user_id=uid, job_type='Mining')
            .order_by(Job.timestamp.desc())
            .all())
    entries = MiningEntry.query.filter_by(user_id=uid).all()
    averages, timestamps, yields = moving_average(entries, 60, 0, MiningEntry)
    return render_template('jobs_overview.html', jobs=jobs, values=averages,
                           labels=timestamps, yield_data=yields)
@bp.route('/jobs/mining/tracker/<job_id>')
@login_required
def mining_tracker(job_id):
    """Render the live mining tracker page for one job.

    Requires an unexpired subscription and the ``mining`` flag; otherwise
    the user is flashed a renewal notice and sent back to the index.
    """
    # Explicit None check replaces the old bare ``except`` probe.
    if current_user.sub_expiration is None:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and current_user.mining:
        # first_or_404 for consistency with the fishing/postal trackers, and
        # so a bad id 404s instead of passing job=None to the template.
        job = Job.query.filter_by(id=job_id).first_or_404()
        return render_template('mining_tracker.html', job=job)
    flash('Please renew your subscription to keep using this service!')
    return redirect(url_for('main.index'))
@bp.route('/jobs/mining/view/<job_id>')
@login_required
def mining_view(job_id):
    """Detail view for one mining job: summary totals plus chart data."""
    job = Job.query.filter_by(id=job_id).first_or_404()
    entries = MiningEntry.query.filter_by(job=job_id).order_by(MiningEntry.timestamp).all()
    ma_data, time_data, yield_data = moving_average(entries, 2, 30, MiningEntry)
    # NOTE(review): the job type is lowercase 'mining' while the sibling
    # views pass capitalized names ('Fishing', 'Postal') — confirm
    # summarize_job() is case-insensitive.
    output = summarize_job(entries, 'mining')
    job.total_earnings = output['total']
    job.hourly_earnings = output['total_hour']
    db.session.commit()
    # NOTE(review): label2 '% Packages Accepted' looks copy-pasted from the
    # postal view — verify the intended mining label.
    return render_template('job_view.html', output=output, entries=entries,
        values=ma_data, labels=time_data, yield_data=yield_data, label=f'5 Minute Earnings ($)', label2='% Packages Accepted')
@bp.route('/jobs/mining/tracker/add_entry', methods=['POST'])
@login_required
def add_mining_entry():
    """Persist one mining tracker entry posted from the tracker page."""
    form = request.form
    job = Job.query.filter_by(id=int(form['job_id'])).first()
    entry = MiningEntry(
        job=job.id,
        user_id=current_user.id,
        no_pay=form['no_stone'] == 'true',
        sell_value=int(form['pay']),
    )
    db.session.add(entry)
    db.session.commit()
    return jsonify({'text': f'This entry has been recorded at {entry.timestamp}.'})
# END MINING
@bp.route('/dashboard')
def dashboard():
    """Render the static dashboard page."""
    # NOTE(review): unlike every other view in this section there is no
    # @login_required here — confirm the dashboard is meant to be public.
    return render_template('dashboard.html')
# END JOB SECTION
# START CASINO SECTION
@bp.route('/jobs/blackjack/view')
@login_required
def blackjack_jobs():
    """List the current user's blackjack sessions, newest first."""
    sessions = (Job.query
                .filter_by(user_id=current_user.id, job_type='Blackjack')
                .order_by(Job.timestamp.desc())
                .all())
    return render_template('jobs_overview.html', jobs=sessions)
@bp.route('/blackjack_tracker/<job_id>', methods=['GET'])
@login_required
def blackjack_tracker(job_id):
    """Render the live blackjack tracker page for one job.

    Requires an unexpired subscription and the ``blackjack`` flag;
    otherwise the user is flashed a renewal notice and redirected.
    """
    # Explicit None check replaces the old bare ``except`` probe.
    if current_user.sub_expiration is None:
        current_user.sub_expiration = datetime.utcnow() - timedelta(seconds=10)
    if current_user.sub_expiration > datetime.utcnow() and current_user.blackjack:
        cards = blackjack_cards()
        return render_template('blackjack_tracker.html', cards=cards, job_id=job_id, user_id=current_user.id)
    flash('Please renew your subscription to keep using this service!')
    return redirect(url_for('main.index'))
@bp.route('/blackjack_checker/<entry>', methods=['GET'])
@login_required
def blackjack_checker(entry):
    """Show the recorded hands belonging to one blackjack entry."""
    hands = BlackjackHand.query.filter_by(blackjack_entry=entry).all()
    return render_template('blackjack_checker.html',
                           cards=blackjack_cards(), entries=hands)
@bp.route('/blackjack_decision', methods=['POST'])
@login_required
def blackjack_decision():
    """Placeholder for the blackjack strategy-decision endpoint.

    TODO: not implemented — currently returns None, which Flask turns
    into a 500 for any client that POSTs here.
    """
    pass
@bp.route('/blackjack/add_entry', methods=['POST'])
@login_required
def add_blackjack_entry():
    """Record a blackjack round: one entry row plus player and dealer hands."""
    job = Job.query.filter_by(id=int(request.form['job_id'])).first_or_404()
    entry = BlackjackEntry(user_id=int(request.form['user_id']), job=job.id)
    db.session.add(entry)
    # Flush so the database assigns entry.id before it is used as a foreign
    # key below. Previously entry.id was still None here, so both hands
    # were created with blackjack_entry=None and were orphaned.
    db.session.flush()
    player_hand = BlackjackHand(blackjack_entry=entry.id, player_hand=True)
    dealer_hand = BlackjackHand(blackjack_entry=entry.id)
    # Each posted card name is a boolean column on BlackjackHand.
    for card in request.form.getlist('player_cards[]'):
        setattr(player_hand, card, True)
    for card in request.form.getlist('dealer_cards[]'):
        setattr(dealer_hand, card, True)
    db.session.add(player_hand)
    db.session.add(dealer_hand)
    db.session.commit()
    return jsonify({'text': f'This entry has been recorded at {entry.timestamp}.'})
# END CASINO SECTION
# START RACE SECTION
# RACE LEAD SECTION
# TESTING ROUTES
@bp.route('/add_to_race', methods=['GET', 'POST'])
@login_required
def add_to_race():
    """Testing helper: register any owned car into any race."""
    form = AddToRaceForm()
    ocs = OwnedCar.query.all()
    races = Race.query.all()
    # Select-field choices must be populated before validate_on_submit().
    form.car.choices = [(oc.id, oc.name) for oc in ocs]
    form.race.choices = [(race.id, race.name) for race in races]
    if form.validate_on_submit():
        race = Race.query.filter_by(id=form.race.data).first()
        car = OwnedCar.query.filter_by(id=form.car.data).first()
        rp = RacePerformance(user_id=car.user_id, car_id=car.car_id, car_details=car.id,
                        track_id=race.track_info.id, race_id=race.id)
        db.session.add(rp)
        db.session.commit()
        flash(f'{car.name} has been added to {race.name}.')
        return redirect(url_for('main.add_to_race'))
    return render_template('add_product.html', form=form)
# END TESTING ROUTES
# START ADD STUFF
@bp.route('/add_car', methods=['GET', 'POST'])
@login_required
def add_car():
    """Race-lead form to add a new stock car definition."""
    form = AddCarForm()
    if current_user.race_lead:
        if form.validate_on_submit():
            car = Car(name=form.name.data, car_class=form.car_class.data,
                      make=form.make.data, model=form.model.data,
                      drivetrain=form.drivetrain.data, image=form.image.data)
            db.session.add(car)
            db.session.commit()
            flash(f'{car.name} has been added.')
            return redirect(url_for('main.add_car'))
    else:
        flash('You do not have access to add cars.')
        return redirect(url_for('main.index'))
    # GET (or failed validation) for an authorized race lead: show the form.
    return render_template('add_product.html', title=_('Add Car'),
                           form=form)
@bp.route('/add_track', methods=['GET', 'POST'])
@login_required
def add_track():
    """Race-lead form to add a new track."""
    form = AddTrackForm()
    if current_user.race_lead:
        if form.validate_on_submit():
            track = Track(name=form.name.data, track_map=form.track_map.data,
                          track_video=form.track_video.data, lap_race=form.lap_race.data,
                          embed_link=form.embed_link.data, meet_location=form.meet_location.data)
            db.session.add(track)
            db.session.commit()
            flash(f'{track.name} has been added as a track.')
            return redirect(url_for('main.add_track'))
    else:
        flash('You do not have access to add tracks.')
        return redirect(url_for('main.index'))
    # GET (or failed validation) for an authorized race lead: show the form.
    return render_template('add_product.html', title=_('Add Track'),
                           form=form)
@bp.route('/add_crew', methods=['GET', 'POST'])
@login_required
def add_crew():
    """Race-lead form to add a new crew and claim its home track."""
    if current_user.race_lead:
        form = AddCrewForm()
        if form.validate_on_submit():
            # New crews start with 100 points.
            crew = Crew(name=form.name.data, points=100,
                        image=form.image.data, track_id=form.home_track.data.id)
            db.session.add(crew)
            db.session.commit()
            # Link the home track back to the crew (needs crew.id, hence the
            # second commit).
            track = Track.query.filter_by(id=crew.track_id).first()
            track.crew_id = crew.id
            db.session.commit()
            flash(f'{crew.name} has been added as a crew.')
            return redirect(url_for('main.add_crew'))
        return render_template('add_product.html', title=_('Add Crew'),
                               form=form)
    flash('You do not have access to add crews.')
    return redirect(url_for('main.index'))
# END ADD STUFF
# START MANAGE STUFF
@bp.route('/manage_cars', methods=['GET', 'POST'])
@login_required
def manage_cars():
    """Race-lead view of every stock car, sorted by name."""
    if not current_user.race_lead:
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
    all_cars = Car.query.order_by(Car.name).all()
    return render_template('cars.html', cars=all_cars, personal=False)
@bp.route('/edit_car/<car_id>', methods=['GET', 'POST'])
@login_required
def edit_car(car_id):
    """Race-lead form to edit (or soft-delete) a stock car definition."""
    if current_user.race_lead:
        car = Car.query.filter_by(id=car_id).first_or_404()
        form = AddCarForm(name=car.name, make=car.make, model=car.model,
                          car_class=car.car_class, drivetrain=car.drivetrain,
                          image=car.image)
        if form.validate_on_submit():
            if form.delete.data:
                # NOTE(review): "deleting" reassigns the car to user 245 —
                # presumably a graveyard account; confirm and give the magic
                # number a name. The flash text / my_cars redirect also read
                # like they were copied from the owned-car editor.
                car.user_id = 245
                db.session.commit()
                flash('The car has been removed from your cars.')
                return redirect(url_for('main.my_cars'))
            car.name=form.name.data
            car.make = form.make.data
            car.model = form.model.data
            car.car_class = form.car_class.data
            car.drivetrain = form.drivetrain.data
            car.image = form.image.data
            db.session.commit()
            flash(f'{car.name} has been updated successfully.')
            return redirect(url_for('main.manage_cars'))
        return render_template('add_product.html', form=form, title=f'Edit Car - {car.name}')
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
@bp.route('/manage_tracks', methods=['GET', 'POST'])
@login_required
def manage_tracks():
    """Race-lead view of every track, sorted by name."""
    if not current_user.race_lead:
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
    all_tracks = Track.query.order_by(Track.name).all()
    return render_template('tracks.html', tracks=all_tracks,
                           rating=TrackRating, func=func)
@bp.route('/edit_track/<track_id>', methods=['GET', 'POST'])
@login_required
def edit_track(track_id):
    """Race-lead form to edit an existing track's details."""
    if current_user.race_lead:
        track = Track.query.filter_by(id=track_id).first_or_404()
        form = AddTrackForm(name=track.name,track_map=track.track_map,
                            track_video=track.track_video, lap_race=track.lap_race,
                            embed_link=track.embed_link, meet_location=track.meet_location)
        if form.validate_on_submit():
            track.name = form.name.data
            track.track_map = form.track_map.data
            track.track_video = form.track_video.data
            track.lap_race = form.lap_race.data
            track.embed_link = form.embed_link.data
            track.meet_location = form.meet_location.data
            # 'disabled' is only written here, not pre-populated into the
            # form above.
            track.disabled = form.disabled.data
            db.session.commit()
            flash(f'{track.name} has been updated successfully.')
            return redirect(url_for('main.manage_tracks'))
        return render_template('add_product.html', form=form, title=f'Edit Track - {track.name}')
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
@bp.route('/setup_race', methods=['GET', 'POST'])
@login_required
def setup_race():
    """Create a new race from SetupRaceForm and announce it on Discord.

    Only race leads and race hosts may create races; on success the user
    is redirected to the crew-race or regular race management page.
    """
    form = SetupRaceForm()
    if current_user.race_lead or current_user.race_host:
        if form.validate_on_submit():
            # The browser posts an ISO-8601 'T'/'Z' timestamp; normalize it
            # into the '%Y-%m-%d %H:%M:%S' shape strptime expects.
            utc_time_init = form.utc_time.data.replace('T',' ')
            utc_time = utc_time_init.replace('Z','')
            race = Race(name=form.name.data, start_time=datetime.strptime(utc_time,'%Y-%m-%d %H:%M:%S'),
                        laps=form.laps.data, track=form.track.data.id,
                        highest_class=form.highest_class.data, crew_race=form.octane_crew.data,
                        buyin=form.buyin.data, octane_member=form.octane_member.data,
                        octane_prospect=form.octane_prospect.data, octane_crew=form.octane_crew.data,
                        open_249=form.open_249.data, new_blood_249=form.new_blood_249.data,
                        offroad_249=form.offroad_249.data, moto_249=form.moto_249.data,
                        challenging_crew_id=form.challenging_crew.data.id, defending_crew_id=form.defending_crew.data.id,
                        octane_newcomer=form.octane_newcomer.data, octane_community=form.octane_community.data)
            db.session.add(race)
            track = Track.query.filter_by(id=form.track.data.id).first()
            # times_ran may be NULL for a track that has never hosted a race;
            # the old bare ``except`` around ``+= 1`` is replaced with an
            # explicit None-safe increment.
            track.times_ran = (track.times_ran or 0) + 1
            db.session.commit()
            post_to_discord(race)
            flash(f'{race.name} has been setup.')
            if race.crew_race:
                return redirect(url_for('main.manage_crew_race', race_id=race.id))
            return redirect(url_for('main.manage_race', race_id=race.id))
    else:
        flash('You do not have access to setup races.')
        return redirect(url_for('main.index'))
    return render_template('add_product.html', title=_('Setup Race'),
                           form=form)
@bp.route('/manage_racers', methods=['GET','POST'])
@login_required
def manage_racer_perms():
    """Pick which racer's permissions to edit (admins and race leads only)."""
    if not (current_user.access_level == 'admin' or current_user.race_lead):
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
    form = RacerManageSelectForm()
    if form.validate_on_submit():
        return redirect(url_for('main.manage_racers',
                                user_id=form.user.data.id))
    return render_template('add_product.html', title='Manage Racers', form=form)
@bp.route('/manage_racers/<user_id>', methods=['GET','POST'])
@login_required
def manage_racers(user_id):
    """Edit one user's racer flags and crew membership (admin/race lead)."""
    if current_user.access_level == 'admin' or current_user.race_lead:
        user = User.query.filter_by(id=user_id).first_or_404()
        form = ManageRacerForm(racer=user.racer, race_lead=user.race_lead, crew=user.crew_id, race_host=user.race_host)
        # Prepend a blank '---' option so "no crew" can be selected.
        form.crew.choices = [('','---')]+[(cr.id, cr.name) for cr in Crew.query.order_by(Crew.name).all()]
        if form.validate_on_submit():
            user.racer = form.racer.data
            user.race_lead = form.race_lead.data
            user.race_host = form.race_host.data
            user.racer_updated = datetime.utcnow()
            # Empty string (the '---' option) clears the crew assignment.
            if form.crew.data:
                user.crew_id = form.crew.data
            else:
                user.crew_id = None
            db.session.commit()
            flash(f'Racer info updated for {user.username}')
            return redirect(url_for('main.manage_racer_perms'))
        return render_template('add_product.html', title=f'Manage Racers - {user.username}', form=form)
    else:
        flash('You do not have access to this page.')
        return redirect(url_for('main.index'))
@bp.route('/manage_race/<race_id>', methods=['GET','POST'])
@login_required
def manage_race(race_id):
    """Race manager page: registered racers ordered by finishing position."""
    if current_user.race_lead or current_user.race_host:
        race = Race.query.filter_by(id=race_id).first_or_404()
        racers = RacePerformance.query.filter_by(race_id=race.id).order_by(RacePerformance.end_position).all()
        return render_template('race_manager.html', racers=racers, title=f'Manage Race - {race.name} | {race.highest_class}-Class | {race.track_info.name} | {f"{race.laps} Laps" if race.track_info.lap_race else "Sprint"}', race=race)
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
@bp.route('/manage_crew_race/<race_id>', methods=['GET','POST'])
@login_required
def manage_crew_race(race_id):
    """Crew-race manager page: racers grouped under their crew names."""
    if current_user.race_lead:
        race = Race.query.filter_by(id=race_id).first_or_404()
        racers = RacePerformance.query.filter_by(race_id=race.id).all()
        # Collect the distinct crew names of the registered racers.
        crew_names = []
        for racer in racers:
            if racer.user_info.race_crew not in crew_names:
                crew_names.append(racer.user_info.race_crew)
        # NOTE(review): placeholders only kick in when NO racers have
        # registered (< 1); with exactly one crew signed up the template
        # gets a single column — confirm whether '< 2' was intended.
        if len(crew_names) < 1:
            crew_names = ['Crew 1', 'Crew 2']
        return render_template('crew_race_manager.html', racers=racers,
            crew_names=crew_names,
            title=f'Manage Race - {race.name} | {race.highest_class}-Class | {race.track_info.name} | {race.laps} Laps', race=race)
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
@bp.route('/edit_crew/<crew_id>', methods=['GET','POST'])
@login_required
def edit_crew(crew_id):
    """Race-lead form to edit a crew, including moving its home track."""
    if current_user.race_lead:
        crew = Crew.query.filter_by(id=crew_id).first_or_404()
        form = AddCrewForm(name=crew.name, image=crew.image)
        # Only unclaimed tracks (or this crew's current track) are offered.
        form.home_track.choices = [(tr.id, tr.name) for tr in Track.query.filter((Track.crew_id==None)|(Track.crew_id==crew.id)).all()]
        if request.method == 'GET':
            form.home_track.data = Track.query.filter_by(id=crew.track_id).first()
        if form.validate_on_submit():
            # Release the old home track before claiming the new one.
            old_track = Track.query.filter_by(id=crew.track_id).first()
            old_track.crew_id = None
            db.session.commit()
            track = Track.query.filter_by(id=form.home_track.data.id).first()
            track.crew_id = crew.id
            db.session.commit()
            crew.track_id = form.home_track.data.id
            crew.image = form.image.data
            crew.name = form.name.data
            db.session.commit()
            flash(f'{crew.name} has been updated.')
            return redirect(url_for('main.crew_info'))
        return render_template('add_product.html', form=form, title=f'Edit Crew - {crew.name}')
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
@bp.route('/edit_race/<race_id>', methods=['GET', 'POST'])
@login_required
def edit_race(race_id):
    """Edit or cancel an existing race (race leads and hosts only).

    Cancelling decrements the track's run counter, deletes every
    registration, announces the cancellation on Discord, then deletes
    the race row itself.
    """
    if current_user.race_lead or current_user.race_host:
        race = Race.query.filter_by(id=race_id).first_or_404()
        form = EditRaceForm(name=race.name, crew_race=race.crew_race,
                            laps=race.laps, track=race.track,
                            highest_class=race.highest_class)
        if request.method == 'GET':
            form.track.data = Track.query.filter_by(id=race.track).first()
        if form.validate_on_submit():
            if form.delete_race.data:
                track = Track.query.filter_by(id=race.track).first()
                track.times_ran -= 1
                db.session.commit()
                # Registrations must go before the race row (FK dependency).
                rps = RacePerformance.query.filter_by(race_id=race.id).all()
                for rp in rps:
                    db.session.delete(rp)
                db.session.commit()
                post_cancel_to_discord(race)
                db.session.delete(race)
                db.session.commit()
                flash('The race has been removed successfully.')
                return redirect(url_for('main.upcoming_races'))
            # Moving the race to a different track rebalances both tracks'
            # run counters.
            if form.track.data.id != race.track:
                track = Track.query.filter_by(id=race.track).first()
                track.times_ran -= 1
                new_track = Track.query.filter_by(id=form.track.data.id).first()
                new_track.times_ran += 1
                db.session.commit()
            race.name = form.name.data
            race.track = form.track.data.id
            race.laps = form.laps.data
            race.highest_class = form.highest_class.data
            race.crew_race = form.crew_race.data
            race.challenging_crew_id = form.challenging_crew.data.id
            race.defending_crew_id = form.defending_crew.data.id
            db.session.commit()
            flash(f'{race.name} has been updated successfully.')
            return redirect(url_for('main.manage_race', race_id=race.id))
        return render_template('add_product.html', form=form, title=f'Edit Race - {race.name}')
    flash('You do not have access to this page.')
    return redirect(url_for('main.index'))
# END MANAGE STUFF
# START API CALLS
@bp.route('/race/set_start_order', methods=['POST'])
@login_required
def set_start_order():
    """Persist the starting grid posted by the race-manager UI as JSON."""
    payload = request.get_json()
    requester = User.query.filter_by(id=payload['auth_id']).first()
    if not (requester.race_lead or requester.race_host):
        return jsonify({'text': "You don't have sufficient privileges to set this information."})
    # Each entry is [race_performance_id, ...]; grid slots are 1-based.
    for position, entry in enumerate(payload['racer_order'], start=1):
        performance = RacePerformance.query.filter_by(id=entry[0]).first()
        performance.start_position = position
        db.session.commit()
    return jsonify({'text': "Starting positions have saved successfully."})
@bp.route('/race/set_end_order', methods=['POST'])
@login_required
def set_end_order():
    """Persist the finishing order posted by the race-manager UI as JSON."""
    race_info = request.get_json()
    racer = User.query.filter_by(id=race_info['auth_id']).first()
    if racer.race_lead or racer.race_host:
        racers = race_info['racer_order']
        # `racer` is reused as the loop variable; the permission check above
        # has already run so the shadowing is harmless.
        for index, racer in enumerate(racers):
            try:
                rp = RacePerformance.query.filter_by(id=racer[0]).first()
                rp.end_position = index + 1
                db.session.commit()
            except AttributeError:
                # rp is None when the id no longer exists; skip that entry.
                pass
        return jsonify({'text': "Ending positions have saved successfully."})
    return jsonify({'text': "You don't have sufficient privileges to set this information."})
@bp.route('/race/finalize_race', methods=['POST'])
@login_required
def finalize_race():
    """Finalize a race from the manager UI's JSON payload.

    Records end positions and lap times for finishers, zeroes positions
    for DNFs, pays out the prize pool, triggers achievement checks, and
    (for crew races) writes/updates the CrewResults row before marking
    the race finalized.
    """
    race_info = request.get_json()
    racer = User.query.filter_by(id=race_info['auth_id']).first()
    if racer.race_lead or racer.race_host:
        racers = race_info['racer_order']
        # The race is looked up via the first racer's RacePerformance row.
        race = Race.query.filter_by(id=RacePerformance.query.filter_by(id=racers[0][0]).first().race_id).first()
        racer_ids = []
        # Each entry is [rp_id, user_id, owned_car_id, [lap_ms, ...]].
        for index, racer in enumerate(racers):
            try:
                rp = RacePerformance.query.filter_by(id=int(racer[0])).first()
                racer_ids.append(rp.user_id)
                rp.end_position = index + 1
                db.session.commit()
                for lap in racer[3]:
                    lt = LapTime(milliseconds=lap, race_id=race.id, user_id=racer[1],
                        car_id=racer[2], track_id=race.track_info.id, stock_id=OwnedCar.query.filter_by(id=racer[2]).first().car_id)
                    db.session.add(lt)
                    db.session.commit()
            except AttributeError:
                # rp (or the owned car) no longer exists; skip the entry.
                pass
        # DNFs get end_position 0 so they sort below every finisher.
        dnfs = race_info['dnf_order']
        for racer in dnfs:
            try:
                rp = RacePerformance.query.filter_by(id=racer[0]).first()
                racer_ids.append(rp.user_id)
                rp.end_position = 0
                db.session.commit()
            except AttributeError:
                pass
        calculate_payouts(race, race_info['prizepool'])
        async_check_achievements(racer_ids, 'Race Finish')
        if race.crew_race:
            results = calculate_crew_points(race_info, True)
            # Update the existing CrewResults row if re-finalizing,
            # otherwise create it.
            if CrewResults.query.filter_by(race_id=race.id).first():
                cr = CrewResults.query.filter_by(race_id=race.id).first()
                cr.challenging_crew_points=results['cc_score']
                cr.defending_crew_points=results['dc_score']
                db.session.commit()
            else:
                cr = CrewResults(race_id=race.id, challenging_crew=race.challenging_crew_id,
                    defending_crew=race.defending_crew_id, challenging_crew_points=results['cc_score'],
                    defending_crew_points=results['dc_score'])
                db.session.add(cr)
                db.session.commit()
        race.finalized = True
        db.session.commit()
        return jsonify({'text': "The race has been finalized successfully."})
    return jsonify({'text': "You don't have sufficient privileges to set this information."})
@bp.route('/get_crew_scores', methods=['POST'])
@login_required
def get_crew_scores():
    """Compute live crew scores for the payload posted by the crew-race UI."""
    return calculate_crew_points(request.get_json())
@bp.route('/race/check_if_finished', methods=['POST'])
@login_required
def check_race_finish():
    """Report whether the race named in the JSON payload has been finalized."""
    race_info = request.get_json()
    race = Race.query.filter_by(id=race_info['race_id']).first()
    # An unknown race id previously fell off the end of the function and
    # returned None (a 500 in Flask); treat it as "not finalized" instead.
    if race and race.finalized:
        return jsonify({'finalized': True})
    return jsonify({'finalized': False})
@bp.route('/retrieve_racers', methods=['GET'])
@login_required
def retrieve_racers():
    """Return every user flagged as a racer.

    NOTE(review): jsonify() is handed raw User model instances — this only
    works if User is JSON-serializable (custom encoder/serializer mixin);
    confirm, otherwise this raises TypeError at request time.
    """
    racers = User.query.filter_by(racer=True).all()
    return jsonify({'racers': racers})
@bp.route('/track_records_retrieve', methods=['GET'])
@login_required
def track_records_retrieve():
    """Return every recorded lap time grouped by track name as JSON."""
    track_info = {}
    tracks = Track.query.all()
    for track in tracks:
        track_info[track.name] = []
        for lap in LapTime.query.filter_by(track_id=track.id).all():
            try:
                entry = {
                    'track_id': track.id,
                    'track': track.name,
                    'racer': User.query.filter_by(id=lap.user_id).first().username,
                    'car_class': Car.query.filter_by(id=lap.stock_id).first().car_class,
                    'car': Car.query.filter_by(id=lap.stock_id).first().name,
                    'car_image': OwnedCar.query.filter_by(id=lap.car_id).first().image if OwnedCar.query.filter_by(id=lap.car_id).first().image else Car.query.filter_by(id=lap.stock_id).first().image,
                    'lap_time': lap.milliseconds
                }
                # The append lives inside the try: previously a failed lookup
                # re-appended the PREVIOUS lap's entry (or raised NameError on
                # the very first lap, since `entry` was never bound).
                track_info[track.name].append(entry)
            except AttributeError:
                # Racer/car rows referenced by this lap were deleted; skip it.
                pass
    return jsonify({'data': track_info})
@bp.route('/track_rating_submission', methods=['POST'])
@login_required
def track_rating_submission():
    """Record (or update) a racer's rating of a track after a race.

    The JSON payload must contain user_id, race_id, track_id and rating;
    only users who actually raced that race may rate the track.
    """
    rating_info = request.get_json()
    if RacePerformance.query.filter_by(user_id=rating_info['user_id']).filter_by(race_id=rating_info['race_id']).first():
        # One rating per user per track: update in place if it exists.
        check_rating = TrackRating.query.filter_by(user_id=rating_info['user_id']).filter_by(track_id=rating_info['track_id']).first()
        if check_rating:
            check_rating.rating = rating_info['rating']
            db.session.commit()
            return jsonify({'text': "Your rating has been updated."})
        rating = TrackRating(rating=rating_info['rating'], user_id=rating_info['user_id'],
            track_id=rating_info['track_id'], race_id=rating_info['race_id'])
        db.session.add(rating)
        db.session.commit()
        return jsonify({'text': "Your rating has been recorded."})
    return jsonify({'text': "You did not participate in this race."})
#END API CALLS
# END RACE LEAD SECTION
# START RACER SECTION
@bp.route('/crew_info', methods=['GET'])
@login_required
def crew_info():
    """Crew standings page, highest points first (racers only)."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    standings = Crew.query.order_by(Crew.points.desc()).all()
    return render_template('crew_info.html', crews=standings,
                           race_performance=RacePerformance,
                           crew_results=CrewResults, func=func, race=Race)
@bp.route('/upcoming_races', methods=['GET'])
@login_required
def upcoming_races():
    """List future races, keeping ones that started <10 minutes ago visible."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    cutoff = datetime.utcnow() - timedelta(minutes=10)
    races = Race.query.filter(Race.start_time > cutoff).all()
    return render_template('upcoming_races.html', upcoming_races=races)
@bp.route('/add_owned_car', methods=['GET', 'POST'])
@login_required
def add_owned_car():
    """Racer form to add a personal car (with upgrade levels) to the garage."""
    form = AddOwnedCarForm()
    if current_user.racer:
        if form.validate_on_submit():
            owned_car = OwnedCar(name=form.name.data, user_id=current_user.id, car_id=form.car.data.id,
                engine_level=form.engine_level.data, transmission_level=form.transmission_level.data,
                turbo_level=form.turbo_level.data, brakes_level=form.brakes_level.data,
                suspension_level=form.suspension_level.data, image=form.image.data)
            db.session.add(owned_car)
            db.session.commit()
            flash(f'{owned_car.name} has been added to your car inventory.')
            return redirect(url_for('main.add_owned_car'))
    else:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    # GET (or failed validation) for a racer: show the form.
    return render_template('add_product.html', title=_('Add Owned Car'),
                           form=form)
@bp.route('/my_cars', methods=['GET', 'POST'])
@login_required
def my_cars():
    """Show the garage of cars owned by the logged-in racer."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    garage = OwnedCar.query.filter_by(user_id=current_user.id).all()
    return render_template('cars.html', cars=garage, personal=True)
@bp.route('/edit_owned_car/<car_id>', methods=['GET', 'POST'])
@login_required
def edit_owned_car(car_id):
    """Edit or delete one of the racer's owned cars.

    Deleting also removes every RacePerformance row that referenced the
    car, to satisfy foreign-key constraints.
    """
    if current_user.racer:
        car = OwnedCar.query.filter_by(id=car_id).first_or_404()
        form = EditOwnedCarForm(name=car.name, engine_level=car.engine_level,
            transmission_level=car.transmission_level, turbo_level=car.turbo_level,
            brakes_level=car.brakes_level, suspension_level=car.suspension_level,
            image=car.image)
        if request.method == 'GET':
            form.car.data = Car.query.filter_by(id=car.car_id).first()
        if form.validate_on_submit():
            if form.delete.data:
                # Race history referencing this car must go first.
                rps = RacePerformance.query.filter_by(car_details=car.id).all()
                for rp in rps:
                    db.session.delete(rp)
                    db.session.commit()
                db.session.delete(car)
                db.session.commit()
                flash('The car has been removed from your cars.')
                return redirect(url_for('main.my_cars'))
            car.name = form.name.data
            car.car_id = form.car.data.id
            car.engine_level = form.engine_level.data
            car.transmission_level = form.transmission_level.data
            car.turbo_level = form.turbo_level.data
            car.brakes_level = form.brakes_level.data
            car.suspension_level = form.suspension_level.data
            car.image = form.image.data
            db.session.commit()
            flash(f'Information for {car.name} has been updated.')
            return redirect(url_for('main.my_cars'))
        return render_template('add_product.html', title='Edit Owned Car', form=form)
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/race_signup/<race_id>', methods=['GET', 'POST'])
@login_required
def race_signup(race_id):
    """Register the current racer (with one of their own cars) for a race.

    Guards, in order: race already finalized; crew races restricted to the
    two participating crews; duplicate registration. The car dropdown only
    offers the racer's cars whose class is allowed for this race.
    """
    if current_user.racer:
        race = Race.query.filter_by(id=race_id).first_or_404()
        if race.finalized:
            flash('This race has already finished.')
            return redirect(url_for('main.race_results', race_id=race.id))
        if race.crew_race:
            if current_user.crew_id not in ([race.defending_crew_id, race.challenging_crew_id]):
                flash('You are not in a crew associated with this race. If this is an error talk to an organizer.')
                return redirect(url_for('main.upcoming_races'))
        if RacePerformance.query.filter_by(race_id=race.id).filter_by(user_id=current_user.id).first():
            flash('You have already registered for this race.')
            return redirect(url_for('main.race_info', race_id=race.id))
        classes = get_available_classes(race.highest_class)
        form = RaceSignupForm()
        # Fall back to the stock car's name when the owned car is unnamed.
        form.car.choices = [(c.id, c.name if c.name else c.car_info.name) for c in OwnedCar.query.join(Car, OwnedCar.car_id==Car.id).filter(OwnedCar.user_id==current_user.id).filter(Car.car_class.in_(classes)).all()]
        if form.validate_on_submit():
            car = OwnedCar.query.filter_by(id=form.car.data).first()
            rp = RacePerformance(user_id=current_user.id, car_id=car.car_id,
                car_details=car.id, track_id=race.track,
                race_id=race.id)
            db.session.add(rp)
            db.session.commit()
            flash(f'{car.name} has been registered for this event!')
            return redirect(url_for('main.race_info', race_id=race.id))
        return render_template('race_signup.html', title=f'Sign Up - {race.name}', form=form, race=race)
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/racer_select/<race_id>', methods=['GET', 'POST'])
@login_required
def racer_select(race_id):
    """Pick which racer to borrow a car from for the given race."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    race = Race.query.filter_by(id=race_id).first_or_404()
    form = RacerSelectForm()
    form.racer.choices = [
        (r.id, r.username)
        for r in User.query.filter_by(racer=True).order_by(User.username).all()
    ]
    if form.validate_on_submit():
        return redirect(url_for('main.borrow_a_car', race_id=race_id,
                                racer_id=form.racer.data))
    return render_template('add_product.html',
                           title=f'Borrow A Car - {race.name}', form=form)
@bp.route('/borrow_a_car/<race_id>/<racer_id>', methods=['GET', 'POST'])
@login_required
def borrow_a_car(race_id, racer_id):
    """Register the current racer for a race using ANOTHER racer's car.

    Same guards as race_signup, but the car dropdown is built from the
    lending racer's garage instead of the current user's.
    """
    if current_user.racer:
        race = Race.query.filter_by(id=race_id).first_or_404()
        if race.crew_race:
            if current_user.crew_id not in ([race.defending_crew_id, race.challenging_crew_id]):
                flash('You are not in a crew associated with this race. If this is an error talk to an organizer.')
                return redirect(url_for('main.upcoming_races'))
        if RacePerformance.query.filter_by(race_id=race.id).filter_by(user_id=current_user.id).first():
            flash('You have already registered for this race.')
            return redirect(url_for('main.race_info', race_id=race.id))
        classes = get_available_classes(race.highest_class)
        form = RaceSignupForm()
        # Lender's cars, restricted to classes eligible for this race.
        form.car.choices = [(c.id, c.car_info.name) for c in OwnedCar.query.join(Car, OwnedCar.car_id==Car.id).filter(OwnedCar.user_id==racer_id).filter(Car.car_class.in_(classes)).all()]
        if form.validate_on_submit():
            car = OwnedCar.query.filter_by(id=form.car.data).first()
            # The registration belongs to the borrower (current_user), not
            # the car's owner.
            rp = RacePerformance(user_id=current_user.id, car_id=car.car_id,
                car_details=car.id, track_id=race.track,
                race_id=race.id)
            db.session.add(rp)
            db.session.commit()
            flash(f'{car.name} has been registered for this event!')
            return redirect(url_for('main.race_info', race_id=race.id))
        return render_template('add_product.html', title=f'Sign Up (Borrowing A Car) - {race.name}', form=form)
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/change_registration/<race_id>', methods=['GET', 'POST'])
@login_required
def change_registration(race_id):
    """Swap the car on an existing registration, or leave the race.

    If the racer owns no other eligible car, the registration is deleted
    outright since there is nothing to change to.
    """
    if current_user.racer:
        race = Race.query.filter_by(id=race_id).first_or_404()
        rp = RacePerformance.query.filter_by(race_id=race.id).filter_by(user_id=current_user.id).first_or_404()
        classes = get_available_classes(race.highest_class)
        form = RaceSignupForm()
        # Offer every eligible car EXCEPT the one currently registered.
        form.car.choices = [(c.id, c.name) for c in OwnedCar.query.join(Car, OwnedCar.car_id==Car.id).filter(OwnedCar.user_id==current_user.id).filter(OwnedCar.id != rp.car_details).filter(Car.car_class.in_(classes)).all()]
        if len(form.car.choices) == 0:
            db.session.delete(rp)
            db.session.commit()
            flash("You don't have any other cars to enter the race with. Removing you from registered racers.")
            return redirect(url_for('main.upcoming_races'))
        if form.validate_on_submit():
            if form.leave_race.data:
                db.session.delete(rp)
                db.session.commit()
                flash(f"You have been removed from {race.name}.")
                return redirect(url_for('main.upcoming_races'))
            car = OwnedCar.query.filter_by(id=form.car.data).first()
            rp.car_id = car.car_id
            rp.car_details = car.id
            db.session.commit()
            flash(f'{car.name} has been chosen as your car for {race.name}!')
            return redirect(url_for('main.race_info', race_id=race_id))
        return render_template('add_product.html', title=f'Change Registration - {race.name}', form=form)
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/race_info/<race_id>', methods=['GET', 'POST'])
@login_required
def race_info(race_id):
    """Show the pre-race page: entrants (in start order) with their track and
    career stats, plus the winningest racer and car on this race's track.

    Redirects to the results page once the race is finalized, and to the
    index for non-racers.
    """
    if current_user.racer:
        race = Race.query.filter_by(id=race_id).first_or_404()
        if race.finalized:
            # Finished races have a dedicated results page.
            return redirect(url_for('main.race_results', race_id=race.id))
        racers = race.participants.order_by(RacePerformance.start_position).all()
        for racer in racers:
            # All of this user's laps on this track, fastest first.
            laps = LapTime.query.filter_by(user_id=racer.user_id).filter_by(track_id=race.track).order_by(LapTime.milliseconds).all()
            racer.laps_completed = len(laps)
            if len(laps) > 0:
                lap_average = sum([lap.milliseconds for lap in laps]) / len(laps)
                # Millisecond totals are formatted as MM:SS.mmm for display.
                racer.lap_average = datetime.fromtimestamp(lap_average/1000.0).strftime('%M:%S.%f')[:-3]
                racer.best_lap = datetime.fromtimestamp(laps[0].milliseconds/1000.0).strftime('%M:%S.%f')[:-3]
            else:
                racer.lap_average = 'No Data'
                racer.best_lap = 'No Data'
            # Per-track stats; end_position == 0 marks an unfinished (DNF) entry
            # (race_results filters DNFs the same way), so it is excluded here.
            racer.track_wins = len(RacePerformance.query.filter_by(track_id=race.track).filter_by(user_id=racer.user_id).filter_by(end_position=1).all())
            racer.track_podiums = len(RacePerformance.query.filter_by(track_id=race.track).filter_by(user_id=racer.user_id).filter((RacePerformance.end_position <= 3) & (RacePerformance.end_position > 0)).all())
            racer.track_completions = len(RacePerformance.query.filter_by(track_id=race.track).filter_by(user_id=racer.user_id).filter(RacePerformance.end_position != 0).all())
            # Career stats across all tracks.
            racer.wins = len(RacePerformance.query.filter_by(end_position=1).filter_by(user_id=racer.user_id).all())
            racer.podiums = len(RacePerformance.query.filter_by(user_id=racer.user_id).filter((RacePerformance.end_position <= 3) & (RacePerformance.end_position > 0)).all())
            racer.completions = len(RacePerformance.query.filter_by(user_id=racer.user_id).filter(RacePerformance.end_position != 0).all())
        try:
            # Racer with the most wins on this track. If nobody has won here,
            # .first() returns None and unpacking raises the TypeError caught below.
            racer_id, racer_number_wins = RacePerformance.query.with_entities(RacePerformance.user_id, func.count(RacePerformance.user_id).label('wins')).filter(RacePerformance.track_id==race.track).filter(RacePerformance.end_position==1).group_by(RacePerformance.user_id).order_by(text('wins DESC')).first()
            racer_most_wins = User.query.filter_by(id=racer_id).first()
            try:
                racer_most_wins.lap_record = datetime.fromtimestamp(LapTime.query.filter_by(user_id=racer_most_wins.id).filter_by(track_id=race.track).order_by(LapTime.milliseconds).first().milliseconds / 1000.0).strftime('%M:%S.%f')[:-3]
            except AttributeError:
                # No lap on record for this racer on this track: show 00:00.000.
                racer_most_wins.lap_record = datetime.fromtimestamp(0).strftime('%M:%S.%f')[:-3]
            # Car with the most wins on this track (same pattern as above).
            car_id, car_number_wins = RacePerformance.query.with_entities(RacePerformance.car_id, func.count(RacePerformance.car_id).label('wins')).filter(RacePerformance.track_id==race.track).filter(RacePerformance.end_position==1).group_by(RacePerformance.car_id).order_by(text('wins DESC')).first()
            car_most_wins = Car.query.filter_by(id=car_id).first()
            try:
                car_most_wins.lap_record = datetime.fromtimestamp(LapTime.query.filter_by(stock_id=car_most_wins.id).filter_by(track_id=race.track).order_by(LapTime.milliseconds).first().milliseconds / 1000.0).strftime('%M:%S.%f')[:-3]
            except AttributeError:
                car_most_wins.lap_record = datetime.fromtimestamp(0).strftime('%M:%S.%f')[:-3]
        except TypeError:
            # No wins recorded on this track at all: render without the
            # "most wins" highlights.
            racer_id, racer_number_wins = [None, None]
            racer_most_wins=None
            car_id, car_number_wins = [None, None]
            car_most_wins = None
        return render_template('race_info.html', title=f'Race - {race.name}', race=race, racers=racers,
                               top_racer=racer_most_wins, top_car=car_most_wins, racer_wins=racer_number_wins,
                               car_wins=car_number_wins)
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/race_results/<race_id>', methods=['GET', 'POST'])
@login_required
def race_results(race_id):
    """Show the final results for a race: finishing order, each finisher's
    lap times, DNFs, and the winningest racer/car on this race's track.

    Fix: the race lookup now uses first_or_404() (matching race_info) so an
    unknown race_id returns 404 instead of crashing with AttributeError.
    """
    if current_user.racer:
        race = Race.query.filter_by(id=race_id).first_or_404()
        # Finishers are ordered by final position; end_position == 0 marks a DNF.
        racers = race.participants.filter(RacePerformance.end_position > 0).order_by(RacePerformance.end_position).all()
        dnfs = race.participants.filter(RacePerformance.end_position == 0).order_by(RacePerformance.end_position).all()
        for racer in racers:
            racer_laps = []
            laps = LapTime.query.filter_by(user_id=racer.user_id).filter_by(race_id=race.id).all()
            for lap in laps:
                try:
                    # Format each lap's millisecond total as MM:SS.mmm.
                    racer_laps.append(datetime.fromtimestamp(lap.milliseconds / 1000.0).strftime('%M:%S.%f')[:-3])
                except OSError:
                    # fromtimestamp rejects out-of-range values on some platforms.
                    continue
                except TypeError:
                    # Lap without a recorded time (milliseconds is None).
                    continue
            racer.laps = racer_laps
        try:
            # Racer/car with the most wins on this track. If nobody has won
            # here, .first() returns None and unpacking raises the TypeError
            # caught below.
            racer_id, racer_number_wins = RacePerformance.query.with_entities(RacePerformance.user_id, func.count(RacePerformance.user_id).label('wins')).filter(RacePerformance.track_id==race.track).filter(RacePerformance.end_position==1).group_by(RacePerformance.user_id).order_by(text('wins DESC')).first()
            racer_most_wins = User.query.filter_by(id=racer_id).first()
            car_id, car_number_wins = RacePerformance.query.with_entities(RacePerformance.car_id, func.count(RacePerformance.car_id).label('wins')).filter(RacePerformance.track_id==race.track).filter(RacePerformance.end_position==1).group_by(RacePerformance.car_id).order_by(text('wins DESC')).first()
            car_most_wins = Car.query.filter_by(id=car_id).first()
        except TypeError:
            # No wins recorded on this track: render without the highlights.
            racer_id, racer_number_wins = [None, None]
            racer_most_wins = None
            car_id, car_number_wins = [None, None]
            car_most_wins = None
        return render_template('race_results.html', title=f'Race - {race.name}', race=race, racers=racers,
                               top_racer=racer_most_wins, top_car=car_most_wins, racer_wins=racer_number_wins,
                               car_wins=car_number_wins, dnfs=dnfs, track=Track.query.filter_by(id=race.track).first())
    flash('You do not have access to this section. Talk to the appropriate person for access.')
    return redirect(url_for('main.index'))
@bp.route('/transfer_vehicle/<car_id>', methods=['GET', 'POST'])
@login_required
def transfer_vehicle(car_id):
    """Hand ownership of an owned car over to another racer."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    form = RacerSelectForm()
    form.racer.choices = [(racer.id, racer.username) for racer in User.query.filter_by(racer=True).order_by(User.username).all()]
    if form.validate_on_submit():
        # NOTE(review): car_id comes from the URL and ownership is never
        # verified, so any racer can transfer any car — confirm intended.
        car = OwnedCar.query.filter_by(id=car_id).first_or_404()
        new_owner = User.query.filter_by(id=form.racer.data).first_or_404()
        car.user_id = new_owner.id
        db.session.commit()
        flash(f'{car.car_info.name} has been transferred to {new_owner.username}.')
        return redirect(url_for('main.my_cars'))
    return render_template('add_product.html', form=form, title='Transfer Vehicle')
@bp.route('/race_history', methods=['GET'])
@login_required
def race_history():
    """List every race, newest first."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    past_races = Race.query.order_by(Race.start_time.desc()).all()
    return render_template('race_history.html', races=past_races, rp=RacePerformance)
@bp.route('/achievements', methods=['GET'])
@login_required
def achievements():
    """Show the achievement board with the current user's completions and score."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    earned_ids = [achievement.id for achievement in current_user.completed_achievements]
    total_points = str(sum(achievement.point_value for achievement in current_user.completed_achievements))
    # Distinct category names drive the page's section headings.
    category_rows = Achievement.query.with_entities(Achievement.achievement_category).order_by(Achievement.achievement_category).distinct().all()
    category_names = [row.achievement_category for row in category_rows]
    return render_template('achievements.html', completed_achievements=earned_ids,
                           achievement_score=total_points,
                           ach_obj=Achievement, categories=category_names)
@bp.route('/leaderboard', methods=['GET'])
@login_required
def leaderboard():
    """Show the race-win leaderboard and achievement-point standings."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    # Wins per username, most wins first.
    win_info = RacePerformance.query.with_entities(User.username, func.count(RacePerformance.user_id).label('wins')).join(User, User.id == RacePerformance.user_id).filter(RacePerformance.end_position==1).group_by(User.username).order_by(text('wins DESC')).all()
    top_ten = win_info[:10]
    winners = [row[0] for row in top_ten]
    wins = [row[1] for row in top_ten]
    # Achievement points per racer, highest first.
    standings = []
    for racer in User.query.filter_by(racer=True).all():
        total = sum(achievement.point_value for achievement in racer.completed_achievements)
        standings.append({'name': racer.username, 'score': total})
    standings.sort(key=lambda entry: entry['score'], reverse=True)
    # The table only lists racers with points; the chart takes the top ten
    # regardless of score.
    scored = [entry for entry in standings if entry['score'] > 0]
    achievement_points = [entry['score'] for entry in standings[:10]]
    achievement_racers = [entry['name'] for entry in standings[:10]]
    return render_template('leaderboard.html', win_info=win_info, winners=winners, wins=wins,
                           achievement_points=achievement_points, achievement_racers=achievement_racers,
                           achievement_info=scored)
@bp.route('/track_records', methods=['GET'])
@login_required
def track_records():
    """Landing page for per-track lap records."""
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    return render_template('track_records.html', title='Track Records')
@bp.route('/track_records/<track_id>', methods=['GET'])
@login_required
def specific_track_record(track_id):
    """Show the best lap per racer/car pair on one track, grouped by car class.

    Fixes: the racer access check now runs before the record queries,
    matching every other view in this section (previously non-racers paid
    for the full set of per-class queries before being redirected), and
    lap_times is initialised so the view cannot raise UnboundLocalError
    when no car classes exist.
    """
    if not current_user.racer:
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    track = Track.query.filter_by(id=track_id).first()
    outputdata = {}
    lap_times = []
    car_classes = [x[0] for x in Car.query.with_entities(Car.car_class).distinct().all()]
    for cc in car_classes:
        outputdata[cc] = []
        # Minimum (best) lap time per user/car pair for this class on this track.
        subquery = User.query.with_entities(User.username, Car.name, func.min(LapTime.milliseconds).filter(Car.car_class == cc).filter(LapTime.track_id==track_id).label('milliseconds')).join(LapTime, User.id==LapTime.user_id).join(Car, Car.id==LapTime.stock_id).group_by(User.username, Car.name).order_by(text('milliseconds')).subquery()
        lap_times = db.session.query(subquery).filter(subquery.c.milliseconds != None).all()
        for lt in lap_times:
            outputdata[cc].append(lt)
    # NOTE(review): outputdata collects results for every class but is never
    # passed to the template; lap_times below only holds the LAST class's rows.
    # Confirm whether the template should receive outputdata instead.
    return render_template('specific_track_record.html', title='Track Records', lap_times=lap_times,
                           track=track)
@bp.route('/send_encrypted_message', methods=['GET', 'POST'])
@login_required
def send_encrypted_message():
    """Let race leads and race hosts send an encrypted message."""
    if not (current_user.race_lead or current_user.race_host):
        flash('You do not have access to this section. Talk to the appropriate person for access.')
        return redirect(url_for('main.index'))
    form = EncryptedMessageForm()
    if form.validate_on_submit():
        post_encrypted_message(form)
        flash('Your message has been sent.')
        return redirect(url_for('main.send_encrypted_message'))
    return render_template('add_product.html', title='Send Encrypted Message', form=form)
@bp.route('/messages')
@login_required
def messages():
    """Show the current user's received messages, newest first, paginated."""
    # Mark everything as read and clear the unread-count badge.
    current_user.last_message_read_time = datetime.utcnow()
    current_user.add_notification('unread_message_count', 0)
    db.session.commit()
    page = request.args.get('page', 1, type=int)
    received = current_user.messages_received.order_by(
        Message.timestamp.desc()).paginate(
        page, current_app.config['POSTS_PER_PAGE'], False)
    next_url = None
    if received.has_next:
        next_url = url_for('main.messages', page=received.next_num)
    prev_url = None
    if received.has_prev:
        prev_url = url_for('main.messages', page=received.prev_num)
    return render_template('messages.html', messages=received.items,
                           next_url=next_url, prev_url=prev_url)
@bp.route('/notifications')
@login_required
def notifications():
    """Return the current user's notifications newer than ?since= as JSON."""
    since = request.args.get('since', 0.0, type=float)
    pending = current_user.notifications.filter(
        Notification.timestamp > since).order_by(Notification.timestamp.asc())
    payload = []
    for item in pending:
        payload.append({
            'name': item.name,
            'data': item.get_data(),
            'timestamp': item.timestamp
        })
    return jsonify(payload)
# END RACER SECTION
# END RACE SECTION
154289d8283c62ce9ebd0c4d249a6182619a74b0 | 943 | py | Python | src/check_if_valid_url.py | pranavr2003/selenium-simple | 4b7fabf6915fdb8488d60c6641260e0d80b58635 | [
"WTFPL"
] | null | null | null | src/check_if_valid_url.py | pranavr2003/selenium-simple | 4b7fabf6915fdb8488d60c6641260e0d80b58635 | [
"WTFPL"
] | null | null | null | src/check_if_valid_url.py | pranavr2003/selenium-simple | 4b7fabf6915fdb8488d60c6641260e0d80b58635 | [
"WTFPL"
] | null | null | null | from use_regex_to_filter_url import filter_stuff
import warnings
# ex_str = r'sel-simple/|https://www.python.org/|/^assert "Python" in title^/?febn=q/c/k-pycon?K-RETURN/}'
def check_if_valid_url(url):
    """Check that *url* is in the expected ``sel-simple/|URL|...`` format.

    Emits a UserWarning when the route does not start with 'sel-simple'
    (the previous code built a bare ``warnings.WarningMessage`` record and
    printed its repr, which never actually issued a warning).

    Returns a two-element list: the route split on '/', and the URL to use.
    """
    getting_route_data = filter_stuff(url)
    route = getting_route_data[0]
    url_to_use = getting_route_data[1]
    if not route.startswith('sel-simple'):
        warnings.warn("The route must start with 'sel-simple/|URL|...'",
                      UserWarning, stacklevel=2)
    return [route.split('/'), url_to_use]
# print(check_if_valid_url(ex_str))
# print(check_if_valid_url(r'sel-simple/|google.us|/^from selenium import webdriver^/^driver = webdriver.Chrome()^/#user-data-dir=C:\Users\SONY\AppData\Local\Google\Chrome\User Data/#profile-directory=Default/^print("yeet")^/?febn=q/c/k-pycon?K-RETURN/}'))
| 36.269231 | 256 | 0.709438 |
893c8e2af25397eab4bd0597130fd856d8b954ae | 3,742 | py | Python | app/auth/views.py | lin-wish/random-name | 91bae70aad4547e06388105136573a7c18525ed0 | [
"MIT"
] | null | null | null | app/auth/views.py | lin-wish/random-name | 91bae70aad4547e06388105136573a7c18525ed0 | [
"MIT"
] | null | null | null | app/auth/views.py | lin-wish/random-name | 91bae70aad4547e06388105136573a7c18525ed0 | [
"MIT"
] | null | null | null | from . import auth_blueprint
from flask.views import MethodView
from flask import make_response, request, jsonify
from app.models import User
class RegistrationView(MethodView):
    """Class-based view that registers a new user."""

    def post(self):
        """Handle POST request for this view. Url ---> /auth/register"""
        # Refuse to register the same email twice.
        existing = User.query.filter_by(email=request.data['email']).first()
        if existing:
            payload = {
                'message': 'User already exists. Please login.'
            }
            return make_response(jsonify(payload)), 202
        try:
            post_data = request.data
            new_user = User(email=post_data['email'],
                            password=post_data['password'])
            new_user.save()
            payload = {
                'message': 'You registered successfully. Please log in.'
            }
            return make_response(jsonify(payload)), 201
        except Exception as e:
            # Surface the failure reason to the client.
            payload = {
                'message': str(e)
            }
            return make_response(jsonify(payload)), 401
class LoginView(MethodView):
    """Class-based view that handles user login and access-token generation."""

    def post(self):
        """Handle POST request for this view. Url ---> /auth/login"""
        try:
            # Email is unique, so this identifies at most one account.
            account = User.query.filter_by(email=request.data['email']).first()
            if not (account and account.password_is_valid(request.data['password'])):
                payload = {
                    'message': 'Invalid email or password, Please try again'
                }
                return make_response(jsonify(payload)), 401
            # The token doubles as the client's authorization header value.
            token = account.generate_token(account.id)
            if token:
                payload = {
                    'message': 'You logged in successfully.',
                    'access_token': token.decode()
                }
                return make_response(jsonify(payload)), 200
        except Exception as e:
            payload = {
                'message': str(e)
            }
            return make_response(jsonify(payload)), 500
# Instantiate the class-based views so they can be bound to URL rules.
registration_view = RegistrationView.as_view('register_view')
login_view = LoginView.as_view('login_view')

# Define the rule for the registration url ---> /auth/register
# Then add the rule to the blueprint
auth_blueprint.add_url_rule(
    '/auth/register',
    view_func=registration_view,
    methods=['POST'])

# Define the rule for the login url ---> /auth/login
# (Fix: the original closing line had dataset-extraction junk fused onto it,
# which made the statement a syntax error.)
auth_blueprint.add_url_rule(
    '/auth/login',
    view_func=login_view,
    methods=['POST'])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.